diff --git a/server/.gitignore b/server/.gitignore
new file mode 100644
index 00000000..599aa67c
--- /dev/null
+++ b/server/.gitignore
@@ -0,0 +1,29 @@
+__pycache__/
+dist
+cabal-dev
+*.o
+*.hi
+*.chi
+*.chs.h
+*.dyn_o
+*.dyn_hi
+.virtualenv
+.hpc
+.hsenv
+.cabal-sandbox/
+cabal.sandbox.config
+cabal.config
+*.prof
+*.aux
+*.hp
+TAGS
+.stack-work
+
+# ws related
+ws/
+packaging/build/rootfs
+
+random*.sql
+
+# example related
+sample/data
diff --git a/server/Makefile b/server/Makefile
new file mode 100644
index 00000000..600dd80a
--- /dev/null
+++ b/server/Makefile
@@ -0,0 +1,47 @@
+# Check that given variables are set and all have non-empty values,
+# die with an error otherwise.
+#
+# Params:
+# 1. Variable name(s) to test.
+# 2. (optional) Error message to print.
+check_defined = \
+ $(strip $(foreach 1,$1, \
+ $(call __check_defined,$1,$(strip $(value 2)))))
+__check_defined = \
+ $(if $(value $1),, \
+ $(error $1$(if $2, ($2)) is not set))
+
+project := graphql-engine
+registry := hasura
+packager_ver := 1.0
+
+project_dir := $(shell pwd)
+build_dir := $(project_dir)/$(shell stack --docker path --dist-dir)/build
+
+build-dir: $(project).cabal
+ @:$(call check_defined, IMAGE_VERSION IMAGE_REPO STACK_FLAGS)
+ stack $(STACK_FLAGS) build $(BUILD_FLAGS)
+ mkdir -p packaging/build/rootfs
+	docker run --rm -v $(build_dir)/$(project)/$(project):/root/$(project) $(registry)/graphql-engine-packager:$(packager_ver) /build.sh $(project) | tar -x -C packaging/build/rootfs
+
+image: $(project).cabal
+ @:$(call check_defined, IMAGE_VERSION IMAGE_REPO STACK_FLAGS)
+ make build-dir STACK_FLAGS=$(STACK_FLAGS) IMAGE_REPO=$(IMAGE_REPO) IMAGE_VERSION=$(IMAGE_VERSION)
+	ln -f packaging/build/rootfs/bin/$(project) packaging/build/rootfs/bin/graphql-engine
+ docker build -t $(IMAGE_REPO)/$(project):$(IMAGE_VERSION) packaging/build/
+
+release-image: $(project).cabal
+ @:$(call check_defined, IMAGE_VERSION IMAGE_REPO STACK_FLAGS)
+ rm -rf .stack-work
+ make build-dir STACK_FLAGS=$(STACK_FLAGS) IMAGE_REPO=$(IMAGE_REPO) IMAGE_VERSION=$(IMAGE_VERSION)
+	strip --strip-unneeded packaging/build/rootfs/bin/$(project)
+	upx packaging/build/rootfs/bin/$(project)
+	ln -f packaging/build/rootfs/bin/$(project) packaging/build/rootfs/bin/graphql-engine
+	docker build -t $(IMAGE_REPO)/$(project):$(IMAGE_VERSION) packaging/build/
+
+push: $(project).cabal
+ @:$(call check_defined, IMAGE_VERSION IMAGE_REPO)
+ docker push $(IMAGE_REPO)/$(project):$(IMAGE_VERSION)
+
+packager: packaging/packager.df
+ docker build -t "$(registry)/graphql-engine-packager:$(packager_ver)" -f packaging/packager.df ./packaging/
diff --git a/server/Setup.hs b/server/Setup.hs
new file mode 100644
index 00000000..9a994af6
--- /dev/null
+++ b/server/Setup.hs
@@ -0,0 +1,2 @@
+import Distribution.Simple
+main = defaultMain
diff --git a/server/graphiql/.env b/server/graphiql/.env
new file mode 100644
index 00000000..4f79a0f8
--- /dev/null
+++ b/server/graphiql/.env
@@ -0,0 +1 @@
+GENERATE_SOURCEMAP=false
\ No newline at end of file
diff --git a/server/graphiql/.gitignore b/server/graphiql/.gitignore
new file mode 100644
index 00000000..d30f40ef
--- /dev/null
+++ b/server/graphiql/.gitignore
@@ -0,0 +1,21 @@
+# See https://help.github.com/ignore-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+
+# testing
+/coverage
+
+# production
+/build
+
+# misc
+.DS_Store
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
diff --git a/server/graphiql/package.json b/server/graphiql/package.json
new file mode 100644
index 00000000..a1e78ec6
--- /dev/null
+++ b/server/graphiql/package.json
@@ -0,0 +1,18 @@
+{
+ "name": "react-app",
+ "version": "0.1.0",
+ "private": true,
+ "dependencies": {
+ "graphiql": "^0.11.11",
+ "graphql": "^0.13.2",
+ "react": "^16.2.0",
+ "react-dom": "^16.2.0",
+ "react-scripts": "1.1.1"
+ },
+ "scripts": {
+ "start": "react-scripts start",
+ "build": "react-scripts build",
+ "test": "react-scripts test --env=jsdom",
+ "eject": "react-scripts eject"
+ }
+}
diff --git a/server/graphiql/public/favicon.ico b/server/graphiql/public/favicon.ico
new file mode 100644
index 00000000..a11777cc
Binary files /dev/null and b/server/graphiql/public/favicon.ico differ
diff --git a/server/graphiql/public/index.html b/server/graphiql/public/index.html
new file mode 100644
index 00000000..ed0ebafa
--- /dev/null
+++ b/server/graphiql/public/index.html
@@ -0,0 +1,40 @@
+<!DOCTYPE html>
+<html lang="en">
+  <head>
+    <meta charset="utf-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
+    <meta name="theme-color" content="#000000">
+    <!--
+      manifest.json provides metadata used when your web app is added to the
+      homescreen on Android. See https://developers.google.com/web/fundamentals/engage-and-retain/web-app-manifest/
+    -->
+    <link rel="manifest" href="%PUBLIC_URL%/manifest.json">
+    <link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico">
+    <!--
+      Notice the use of %PUBLIC_URL% in the tags above.
+      It will be replaced with the URL of the `public` folder during the build.
+      Only files inside the `public` folder can be referenced from the HTML.
+
+      Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
+      work correctly both with client-side routing and a non-root public URL.
+      Learn how to configure a non-root public URL by running `npm run build`.
+    -->
+    <title>React App</title>
+  </head>
+  <body>
+    <noscript>
+      You need to enable JavaScript to run this app.
+    </noscript>
+    <div id="root"></div>
+    <!--
+      This HTML file is a template.
+      If you open it directly in the browser, you will see an empty page.
+
+      You can add webfonts, meta tags, or analytics to this file.
+      The build step will place the bundled scripts into the <body> tag.
+
+      To begin the development, run `npm start` or `yarn start`.
+      To create a production bundle, use `npm run build` or `yarn build`.
+    -->
+  </body>
+</html>
diff --git a/server/graphiql/public/manifest.json b/server/graphiql/public/manifest.json
new file mode 100644
index 00000000..ef19ec24
--- /dev/null
+++ b/server/graphiql/public/manifest.json
@@ -0,0 +1,15 @@
+{
+ "short_name": "React App",
+ "name": "Create React App Sample",
+ "icons": [
+ {
+ "src": "favicon.ico",
+ "sizes": "64x64 32x32 24x24 16x16",
+ "type": "image/x-icon"
+ }
+ ],
+ "start_url": "./index.html",
+ "display": "standalone",
+ "theme_color": "#000000",
+ "background_color": "#ffffff"
+}
diff --git a/server/graphiql/src/App.js b/server/graphiql/src/App.js
new file mode 100644
index 00000000..6611ba2a
--- /dev/null
+++ b/server/graphiql/src/App.js
@@ -0,0 +1,41 @@
+import React, { Component } from 'react';
+// import logo from './logo.svg';
+import '../node_modules/graphiql/graphiql.css';
+
+import GraphiQL from 'graphiql';
+import fetch from 'isomorphic-fetch';
+
+import {query, variables} from './graphiql-vars';
+
+const {
+ parse,
+ buildASTSchema
+} = require('graphql');
+
+// const ravenUrl = process.env.RAVEN_URL || 'http://localhost:8080';
+// const ravenUrl = window.location.hostname;
+
+class App extends Component {
+
+ render() {
+
+ const graphQLFetcher = function(graphQLParams) {
+ return fetch('/v1alpha1/graphql', {
+ method: 'post',
+ headers: { 'Content-Type': 'application/json'
+ },
+ body: JSON.stringify(graphQLParams)
+ }).then(response => response.json());
+ };
+
+    var content = <GraphiQL fetcher={graphQLFetcher} query={query} variables={variables} />;
+
+    return (
+      <div className="react-container-graphql">
+        {content}
+      </div>
+    );
+ }
+}
+
+export default App;
diff --git a/server/graphiql/src/graphiql-vars.js b/server/graphiql/src/graphiql-vars.js
new file mode 100644
index 00000000..07d14d52
--- /dev/null
+++ b/server/graphiql/src/graphiql-vars.js
@@ -0,0 +1,80 @@
+var query = `
+query albums_tracks_genre_all {
+ albums {
+ id
+ title
+ tracks {
+ id
+ name
+ genre {
+ name
+ }
+ }
+ }
+}
+
+query albums_tracks_genre_some {
+ albums (where: {artist_id: {_eq: 127}}){
+ id
+ title
+ tracks {
+ id
+ name
+ genre {
+ name
+ }
+ }
+ }
+}
+
+query tracks_media_all {
+ tracks {
+ id
+ name
+ media_type {
+ name
+ }
+ }
+}
+
+query tracks_media_some {
+ tracks (where: {composer: {_eq: "Kurt Cobain"}}){
+ id
+ name
+ album {
+ id
+ title
+ }
+ media_type {
+ name
+ }
+ }
+}
+
+query artists_collaboration {
+ artists(where: {
+ albums: {
+ tracks: {
+ composer: {_eq: "Ludwig van Beethoven"}
+ }
+ }
+ })
+ {
+ id
+ name
+ }
+}
+
+query artistByArtistId {
+ artists(where: {id: {_eq: 3}}) {
+ id
+ name
+ }
+}
+`;
+var variables = `
+{
+}
+`;
+exports.query = query;
+exports.variables = variables;
diff --git a/server/graphiql/src/index.css b/server/graphiql/src/index.css
new file mode 100644
index 00000000..f3ec83d7
--- /dev/null
+++ b/server/graphiql/src/index.css
@@ -0,0 +1,10 @@
+body {
+ height: 100%;
+ margin: 0;
+ width: 100%;
+ overflow: hidden;
+}
+
+.react-container-graphql {
+ height: 100vh;
+}
diff --git a/server/graphiql/src/index.js b/server/graphiql/src/index.js
new file mode 100644
index 00000000..849824be
--- /dev/null
+++ b/server/graphiql/src/index.js
@@ -0,0 +1,6 @@
+import React from 'react';
+import ReactDOM from 'react-dom';
+import './index.css';
+import App from './App';
+
+ReactDOM.render(<App />, document.getElementById('root'));
diff --git a/server/graphiql/src/logo.svg b/server/graphiql/src/logo.svg
new file mode 100644
index 00000000..6b60c104
--- /dev/null
+++ b/server/graphiql/src/logo.svg
@@ -0,0 +1,7 @@
+
diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal
new file mode 100644
index 00000000..18421dba
--- /dev/null
+++ b/server/graphql-engine.cabal
@@ -0,0 +1,220 @@
+name: graphql-engine
+version: 1.0.0
+synopsis: GraphQL API over Postgres
+-- description:
+homepage: https://www.hasura.io
+license: AllRightsReserved
+-- license-file: LICENSE
+author: Vamshi Surabhi
+maintainer: vamshi@hasura.io
+copyright:           2015-2017, 34 Cross Systems Pvt. Ltd, 2017-2018 Hasura Technologies Pvt. Ltd
+category: Database
+build-type: Simple
+-- extra-source-files: README.md
+cabal-version: >=1.10
+
+source-repository head
+ type: git
+ location: https://github.com/hasura/graphql-engine
+
+flag developer
+ description: operate in developer mode
+ default: False
+ manual: True
+
+library
+ default-extensions: NoImplicitPrelude
+ hs-source-dirs: src-lib
+ default-language: Haskell2010
+ build-depends: base
+ , pg-client
+ , text
+ , bytestring
+ , postgresql-libpq
+ , mtl
+ , aeson
+ , aeson-casing
+ , unordered-containers
+ , template-haskell
+ , hashable
+ , transformers
+ , transformers-base
+ , http-types
+ , attoparsec
+ , time
+ , scientific
+ , Spock-core
+ , split
+ , optparse-applicative
+ , wai-extra
+ , containers
+ , hashtables
+ , resource-pool
+ , monad-control
+ , regex-tdfa
+ , wai-logger
+ , fast-logger
+ , wai
+ , postgresql-binary
+ , file-embed
+
+ -- Encoder related
+ , uuid
+ , reinterpret-cast
+ , vector
+
+ -- Logging related
+ , network
+ , byteorder
+
+ -- hashing for logging
+ , cryptonite
+
+ -- Transaction related
+ , focus
+ , list-t
+ , stm
+ , stm-containers
+ , alarmclock
+
+ -- Server related
+ , warp
+ , th-lift-instances
+ , lens
+
+ -- GraphQL related
+ , graphql-parser
+ , wai-middleware-static
+
+ -- URL parser related
+ , network-uri
+
+ -- String related
+ , case-insensitive
+ , string-conversions
+
+ -- Http client
+ , wreq
+ , http-client
+ , http-client-tls
+ , connection
+
+ --
+ , protolude
+ , data-has
+
+ exposed-modules: Hasura.Server.App
+ , Hasura.Server.Init
+ , Hasura.Server.Middleware
+ , Hasura.Server.Logging
+ , Hasura.Server.Query
+ , Hasura.Server.Utils
+ , Hasura.RQL.Types
+ , Hasura.RQL.Instances
+ , Hasura.RQL.Types.SchemaCache
+ , Hasura.RQL.Types.Common
+ , Hasura.RQL.Types.Permission
+ , Hasura.RQL.Types.Error
+ , Hasura.RQL.Types.DML
+ , Hasura.RQL.DDL.Deps
+ , Hasura.RQL.DDL.Permission.Internal
+ , Hasura.RQL.DDL.Permission
+ , Hasura.RQL.DDL.Relationship
+ , Hasura.RQL.DDL.QueryTemplate
+ , Hasura.RQL.DDL.Schema.Table
+ , Hasura.RQL.DDL.Schema.Diff
+ , Hasura.RQL.DDL.Metadata
+ , Hasura.RQL.DDL.Utils
+ , Hasura.RQL.DML.Delete
+ , Hasura.RQL.DML.Explain
+ , Hasura.RQL.DML.Internal
+ , Hasura.RQL.DML.Insert
+ , Hasura.RQL.DML.Returning
+ , Hasura.RQL.DML.Select
+ , Hasura.RQL.DML.Update
+ , Hasura.RQL.DML.Count
+ , Hasura.RQL.DML.QueryTemplate
+ , Hasura.RQL.GBoolExp
+
+ , Hasura.GraphQL.Execute
+ , Hasura.GraphQL.Execute.Result
+ , Hasura.GraphQL.Schema
+ , Hasura.GraphQL.OrderedMap
+ , Hasura.GraphQL.NonEmptySeq
+ , Hasura.GraphQL.Utils
+ , Hasura.GraphQL.Validate.Types
+ , Hasura.GraphQL.Validate.Context
+ , Hasura.GraphQL.Validate.Field
+ , Hasura.GraphQL.Validate.InputValue
+ , Hasura.GraphQL.Resolve.BoolExp
+ , Hasura.GraphQL.Resolve.Context
+ , Hasura.GraphQL.Resolve.InputValue
+ , Hasura.GraphQL.Resolve.Introspect
+ , Hasura.GraphQL.Resolve.Mutation
+ , Hasura.GraphQL.Resolve.Select
+
+ , Data.Text.Extended
+
+ , Hasura.SQL.DML
+ , Hasura.SQL.Types
+ , Hasura.SQL.Value
+ , Hasura.SQL.GeoJSON
+ , Hasura.SQL.Time
+ , Hasura.Prelude
+
+ if flag(developer)
+ ghc-prof-options: -rtsopts -fprof-auto -fno-prof-count-entries
+
+ ghc-options: -O2 -Wall
+
+executable graphql-engine
+ default-extensions: NoImplicitPrelude
+ main-is: Main.hs
+ default-language: Haskell2010
+ hs-source-dirs: src-exec
+ build-depends: base
+ , Spock-core >= 0.11
+ , graphql-engine
+ , aeson >= 1.0
+ , bytestring >= 0.10
+ , mtl
+ , optparse-applicative >= 0.12
+ , yaml
+ , template-haskell >= 2.11
+ , time >= 1.6
+ , text
+ , lens
+ , unordered-containers >= 0.2
+ , pg-client
+ other-modules: Ops
+ TH
+
+ if flag(developer)
+ ghc-prof-options: -rtsopts -fprof-auto -fno-prof-count-entries
+
+ ghc-options: -O2 -Wall -threaded
+
+executable graphql-engine-test
+ main-is: Main.hs
+ Default-Language: Haskell2010
+ Hs-Source-Dirs: test
+ ghc-options: -O2 -Wall
+ Build-Depends: Spock-core >= 0.11
+ , aeson
+ , base
+ , bytestring
+ , directory
+ , fgl
+ , filepath
+ , hspec
+ , hspec-core
+ , hspec-wai
+ , http-types
+ , optparse-applicative
+ , graphql-engine -any
+ , text
+ , transformers
+ , wai
+ , wai-extra
+ , unordered-containers
+ , pg-client
diff --git a/server/packaging/build/Dockerfile b/server/packaging/build/Dockerfile
new file mode 100644
index 00000000..b7d0a786
--- /dev/null
+++ b/server/packaging/build/Dockerfile
@@ -0,0 +1,2 @@
+FROM scratch
+COPY rootfs/ /
diff --git a/server/packaging/packager.df b/server/packaging/packager.df
new file mode 100644
index 00000000..cf361529
--- /dev/null
+++ b/server/packaging/packager.df
@@ -0,0 +1,5 @@
+FROM hasura/haskell-docker-packager:1.1
+MAINTAINER vamshi@34cross.in
+
+RUN apt-get update && apt-get install -y libpq5 \
+ && rm -rf /var/lib/apt/lists/*
\ No newline at end of file
diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs
new file mode 100644
index 00000000..480cfd26
--- /dev/null
+++ b/server/src-exec/Main.hs
@@ -0,0 +1,147 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Main where
+
+import Ops
+
+import Data.Time.Clock (getCurrentTime)
+import Options.Applicative
+import System.Exit (exitFailure)
+import Web.Spock.Core (runSpockNoBanner, spockT)
+
+import qualified Data.Aeson as A
+import qualified Data.ByteString.Char8 as BC
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.ByteString.Lazy.Char8 as BLC
+import qualified Data.Text as T
+import qualified Data.Yaml as Y
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Metadata (fetchMetadata)
+import Hasura.Server.App (AuthMode (..), app, ravenLogGen)
+import Hasura.Server.Init
+import Hasura.Server.Logging (withStdoutLogger)
+
+import qualified Database.PG.Query as Q
+
+data RavenOptions
+ = RavenOptions
+ { roConnInfo :: !RawConnInfo
+ , roMode :: !RavenMode
+ } deriving (Show, Eq)
+
+data ServeOptions
+ = ServeOptions
+ { soPort :: !Int
+ , soConnParams :: !Q.ConnParams
+ , soTxIso :: !Q.TxIsolation
+ , soRootDir :: !(Maybe String)
+ , soAccessKey :: !(Maybe AccessKey)
+ , soCorsConfig :: !CorsConfig
+ , soWebHook :: !(Maybe T.Text)
+ } deriving (Show, Eq)
+
+data RavenMode
+ = ROServe !ServeOptions
+ | ROExport
+ | ROClean
+ | ROExecute
+ deriving (Show, Eq)
+
+parseRavenMode :: Parser RavenMode
+parseRavenMode = subparser
+ ( command "serve" (info (helper <*> serveOptsParser)
+ ( progDesc "Start the HTTP api server" ))
+ <> command "export" (info (pure ROExport)
+ ( progDesc "Export raven's schema to stdout" ))
+ <> command "clean" (info (pure ROClean)
+ ( progDesc "Clean raven's metadata to start afresh" ))
+ <> command "execute" (info (pure ROExecute)
+ ( progDesc "Execute a query" ))
+ )
+ where
+ serveOptsParser = ROServe <$> serveOpts
+ serveOpts = ServeOptions
+ <$> parseServerPort
+ <*> parseConnParams
+ <*> parseTxIsolation
+ <*> parseRootDir
+ <*> parseAccessKey
+ <*> parseCorsConfig
+ <*> parseWebHook
+
+parseArgs :: IO RavenOptions
+parseArgs = execParser opts
+ where
+ optParser = RavenOptions <$> parseRawConnInfo <*> parseRavenMode
+ opts = info (helper <*> optParser)
+ ( fullDesc <>
+ header "raven - Hasura's datastore")
+
+printJSON :: (A.ToJSON a) => a -> IO ()
+printJSON = BLC.putStrLn . A.encode
+
+printYaml :: (A.ToJSON a) => a -> IO ()
+printYaml = BC.putStrLn . Y.encode
+
+mkAuthMode :: Maybe AccessKey -> Maybe T.Text -> Either String AuthMode
+mkAuthMode mAccessKey mWebHook =
+ case (mAccessKey, mWebHook) of
+ (Nothing, Nothing) -> return AMNoAuth
+ (Just key, Nothing) -> return $ AMAccessKey key
+ (Nothing, Just _) -> throwError $
+    "Fatal Error: --auth-hook requires --access-key to be set"
+ (Just key, Just hook) -> return $ AMAccessKeyAndHook key hook
+
+main :: IO ()
+main = withStdoutLogger ravenLogGen $ \rlogger -> do
+ (RavenOptions rci ravenMode) <- parseArgs
+ ci <- either ((>> exitFailure) . (putStrLn . connInfoErrModifier))
+ return $ mkConnInfo rci
+ printConnInfo ci
+ case ravenMode of
+ ROServe (ServeOptions port cp isoL mRootDir mAccessKey corsCfg mWebHook) -> do
+ am <- either ((>> exitFailure) . putStrLn) return $
+ mkAuthMode mAccessKey mWebHook
+ initialise ci
+ migrate ci
+ pool <- Q.initPGPool ci cp
+ runSpockNoBanner port $ do
+ putStrLn $ "server: running on port " ++ show port
+ spockT id $ app isoL mRootDir rlogger pool am corsCfg
+ ROExport -> do
+ res <- runTx ci fetchMetadata
+ either ((>> exitFailure) . printJSON) printJSON res
+ ROClean -> do
+ res <- runTx ci cleanCatalog
+ either ((>> exitFailure) . printJSON) (const cleanSuccess) res
+ ROExecute -> do
+ queryBs <- BL.getContents
+ res <- runTx ci $ execQuery queryBs
+ either ((>> exitFailure) . printJSON) BLC.putStrLn res
+ where
+ runTx ci tx = do
+ pool <- getMinimalPool ci
+ runExceptT $ Q.runTx pool (Q.Serializable, Nothing) tx
+ getMinimalPool ci = do
+ let connParams = Q.defaultConnParams { Q.cpConns = 1 }
+ Q.initPGPool ci connParams
+ initialise ci = do
+ currentTime <- getCurrentTime
+ res <- runTx ci $ initCatalogSafe currentTime
+ either ((>> exitFailure) . printJSON) putStrLn res
+ migrate ci = do
+ currentTime <- getCurrentTime
+ res <- runTx ci $ migrateCatalog currentTime
+ either ((>> exitFailure) . printJSON) putStrLn res
+
+ cleanSuccess = putStrLn "successfully cleaned raven related data"
+
+ printConnInfo ci =
+ putStrLn $
+ "Postgres connection info:"
+ ++ "\n Host: " ++ Q.connHost ci
+ ++ "\n Port: " ++ show (Q.connPort ci)
+ ++ "\n User: " ++ Q.connUser ci
+ ++ "\n Database: " ++ Q.connDatabase ci
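Aside: the auth-mode selection in `mkAuthMode` above is a small total function over the two optional credentials. A standalone sketch of its four cases, with the module's `AccessKey` and webhook types simplified to `String` (an illustrative reduction, not the real types):

```haskell
-- Sketch of mkAuthMode's case analysis; types simplified to String.
data AuthMode
  = AMNoAuth
  | AMAccessKey String
  | AMAccessKeyAndHook String String
  deriving (Show)

mkAuthMode :: Maybe String -> Maybe String -> Either String AuthMode
mkAuthMode Nothing    Nothing     = Right AMNoAuth
mkAuthMode (Just key) Nothing     = Right (AMAccessKey key)
mkAuthMode Nothing    (Just _)    =
  Left "Fatal Error: --auth-hook requires --access-key to be set"
mkAuthMode (Just key) (Just hook) = Right (AMAccessKeyAndHook key hook)

main :: IO ()
main = mapM_ print
  [ mkAuthMode Nothing Nothing                    -- Right AMNoAuth
  , mkAuthMode (Just "secret") Nothing            -- Right (AMAccessKey "secret")
  , mkAuthMode Nothing (Just "http://auth.local") -- Left: hook without a key
  ]
```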
diff --git a/server/src-exec/Ops.hs b/server/src-exec/Ops.hs
new file mode 100644
index 00000000..780499d0
--- /dev/null
+++ b/server/src-exec/Ops.hs
@@ -0,0 +1,167 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE MultiWayIf #-}
+
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Ops
+ ( initCatalogSafe
+ , cleanCatalog
+ , migrateCatalog
+ , execQuery
+ ) where
+
+import TH
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Schema.Table
+import Hasura.RQL.Types
+import Hasura.Server.Query
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+
+import Data.Time.Clock (UTCTime)
+
+import qualified Data.Aeson as A
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text as T
+
+curCatalogVer :: T.Text
+curCatalogVer = "1"
+
+initCatalogSafe :: UTCTime -> Q.TxE QErr String
+initCatalogSafe initTime = do
+ hdbCatalogExists <- Q.catchE defaultTxErrorHandler $
+ doesSchemaExist $ SchemaName "hdb_catalog"
+ bool (initCatalogStrict True initTime) onCatalogExists hdbCatalogExists
+ where
+ onCatalogExists = do
+ versionExists <- Q.catchE defaultTxErrorHandler $
+ doesVersionTblExist
+ (SchemaName "hdb_catalog") (TableName "hdb_version")
+ bool (initCatalogStrict False initTime) (return initialisedMsg) versionExists
+
+ initialisedMsg = "initialise: the state is already initialised"
+
+ doesVersionTblExist sn tblN =
+ (runIdentity . Q.getRow) <$> Q.withQ [Q.sql|
+ SELECT EXISTS (
+ SELECT 1
+ FROM pg_tables
+ WHERE schemaname = $1 AND tablename = $2)
+ |] (sn, tblN) False
+
+ doesSchemaExist sn =
+ (runIdentity . Q.getRow) <$> Q.withQ [Q.sql|
+ SELECT EXISTS (
+ SELECT 1
+ FROM information_schema.schemata
+ WHERE schema_name = $1
+ )
+ |] (Identity sn) False
+
+initCatalogStrict :: Bool -> UTCTime -> Q.TxE QErr String
+initCatalogStrict createSchema initTime = do
+ Q.catchE defaultTxErrorHandler $ do
+
+ when createSchema $ do
+ Q.unitQ "CREATE SCHEMA hdb_catalog" () False
+ -- This is where the generated views and triggers are stored
+ Q.unitQ "CREATE SCHEMA hdb_views" () False
+
+ flExtExists <- isExtInstalled "first_last_agg"
+    if flExtExists
+      then Q.unitQ "CREATE EXTENSION first_last_agg SCHEMA hdb_catalog" () False
+      else Q.multiQ $(Q.sqlFromFile "src-rsr/first_last.sql") >>= \(Q.Discard _) -> return ()
+ Q.Discard () <- Q.multiQ $(Q.sqlFromFile "src-rsr/initialise.sql")
+ return ()
+
+ -- Build the metadata query
+ tx <- liftEither $ buildTxAny adminUserInfo emptySchemaCache metadataQuery
+
+ -- Execute the query
+ void $ snd <$> tx
+ setAsSystemDefined >> addVersion initTime
+ return "initialise: successfully initialised"
+ where
+ addVersion modTime = Q.catchE defaultTxErrorHandler $
+ Q.unitQ [Q.sql|
+ INSERT INTO "hdb_catalog"."hdb_version" VALUES ($1, $2)
+ |] (curCatalogVer, modTime) False
+
+ setAsSystemDefined = Q.catchE defaultTxErrorHandler $ do
+ Q.unitQ "UPDATE hdb_catalog.hdb_table SET is_system_defined = 'true'" () False
+ Q.unitQ "UPDATE hdb_catalog.hdb_relationship SET is_system_defined = 'true'" () False
+ Q.unitQ "UPDATE hdb_catalog.hdb_permission SET is_system_defined = 'true'" () False
+ Q.unitQ "UPDATE hdb_catalog.hdb_query_template SET is_system_defined = 'true'" () False
+
+ isExtInstalled :: T.Text -> Q.Tx Bool
+ isExtInstalled sn =
+ (runIdentity . Q.getRow) <$> Q.withQ [Q.sql|
+ SELECT EXISTS (
+ SELECT 1
+ FROM pg_catalog.pg_available_extensions
+ WHERE name = $1
+ )
+ |] (Identity sn) False
+
+
+cleanCatalog :: Q.TxE QErr ()
+cleanCatalog = Q.catchE defaultTxErrorHandler $ do
+ -- This is where the generated views and triggers are stored
+ Q.unitQ "DROP SCHEMA IF EXISTS hdb_views CASCADE" () False
+ Q.unitQ "DROP SCHEMA hdb_catalog CASCADE" () False
+
+getCatalogVersion :: Q.TxE QErr T.Text
+getCatalogVersion = do
+ res <- Q.withQE defaultTxErrorHandler [Q.sql|
+ SELECT version FROM hdb_catalog.hdb_version
+ |] () False
+ return $ runIdentity $ Q.getRow res
+
+migrateFrom08 :: Q.TxE QErr ()
+migrateFrom08 = Q.catchE defaultTxErrorHandler $ do
+ Q.unitQ "ALTER TABLE hdb_catalog.hdb_relationship ADD COLUMN comment TEXT NULL" () False
+ Q.unitQ "ALTER TABLE hdb_catalog.hdb_permission ADD COLUMN comment TEXT NULL" () False
+ Q.unitQ "ALTER TABLE hdb_catalog.hdb_query_template ADD COLUMN comment TEXT NULL" () False
+ Q.unitQ [Q.sql|
+ UPDATE hdb_catalog.hdb_query_template
+ SET template_defn =
+ json_build_object('type', 'select', 'args', template_defn->'select');
+ |] () False
+
+migrateCatalog :: UTCTime -> Q.TxE QErr String
+migrateCatalog migrationTime = do
+ preVer <- getCatalogVersion
+ if | preVer == curCatalogVer ->
+ return "migrate: already at the latest version"
+ | preVer == "0.8" -> do
+ migrateFrom08
+ -- update the catalog version
+ updateVersion
+ -- clean hdb_views
+ Q.unitQE defaultTxErrorHandler "DROP SCHEMA IF EXISTS hdb_views CASCADE" () False
+ Q.unitQE defaultTxErrorHandler "CREATE SCHEMA hdb_views" () False
+ -- try building the schema cache
+ void buildSchemaCache
+ return "migrate: successfully migrated"
+ | otherwise -> throw400 NotSupported $
+       "migrate: unsupported version: " <> preVer
+ where
+ updateVersion =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ UPDATE "hdb_catalog"."hdb_version"
+ SET "version" = $1,
+ "upgraded_on" = $2
+ |] (curCatalogVer, migrationTime) False
+
+execQuery :: BL.ByteString -> Q.TxE QErr BL.ByteString
+execQuery queryBs = do
+ query <- case A.decode queryBs of
+ Just jVal -> decodeValue jVal
+ Nothing -> throw400 InvalidJSON "invalid json"
+ schemaCache <- buildSchemaCache
+ tx <- liftEither $ buildTxAny adminUserInfo schemaCache query
+ fst <$> tx
diff --git a/server/src-exec/TH.hs b/server/src-exec/TH.hs
new file mode 100644
index 00000000..64a1e7d6
--- /dev/null
+++ b/server/src-exec/TH.hs
@@ -0,0 +1,17 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module TH
+ ( metadataQuery
+ ) where
+
+import Language.Haskell.TH.Syntax (Q, TExp, unTypeQ)
+
+import qualified Data.Yaml.TH as Y
+
+import Hasura.Server.Query
+
+metadataQuery :: RQLQuery
+metadataQuery = $(unTypeQ (Y.decodeFile "src-rsr/hdb_metadata.yaml" :: Q (TExp RQLQuery)))
diff --git a/server/src-lib/Data/Text/Extended.hs b/server/src-lib/Data/Text/Extended.hs
new file mode 100644
index 00000000..f769b5ea
--- /dev/null
+++ b/server/src-lib/Data/Text/Extended.hs
@@ -0,0 +1,29 @@
+{-# LANGUAGE OverloadedStrings #-}
+module Data.Text.Extended
+ ( module DT
+ , squote
+ , dquote
+ , paren
+ , (<->)
+ ) where
+
+import Hasura.Prelude
+
+import Data.Text as DT
+
+squote :: DT.Text -> DT.Text
+squote t = DT.singleton '\'' <> t <> DT.singleton '\''
+{-# INLINE squote #-}
+
+dquote :: DT.Text -> DT.Text
+dquote t = DT.singleton '"' <> t <> DT.singleton '"'
+{-# INLINE dquote #-}
+
+paren :: DT.Text -> DT.Text
+paren t = "(" <> t <> ")"
+{-# INLINE paren #-}
+
+infixr 6 <->
+(<->) :: DT.Text -> DT.Text -> DT.Text
+(<->) l r = l <> DT.singleton ' ' <> r
+{-# INLINE (<->) #-}
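A quick usage sketch of these combinators; the helpers are re-declared inline so the snippet compiles on its own (in the codebase they come from `Data.Text.Extended`):

```haskell
{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import qualified Data.Text.IO as T

-- inline copies of the module's helpers
squote, dquote, paren :: Text -> Text
squote t = "'" <> t <> "'"
dquote t = "\"" <> t <> "\""
paren  t = "(" <> t <> ")"

infixr 6 <->
(<->) :: Text -> Text -> Text
l <-> r = l <> " " <> r

main :: IO ()
main = T.putStrLn $
  "SELECT * FROM" <-> dquote "albums" <-> "WHERE" <-> paren ("title =" <-> squote "IV")
-- prints: SELECT * FROM "albums" WHERE (title = 'IV')
```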
diff --git a/server/src-lib/Hasura/GraphQL/Execute.hs b/server/src-lib/Hasura/GraphQL/Execute.hs
new file mode 100644
index 00000000..cda39bc2
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Execute.hs
@@ -0,0 +1,249 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Execute
+ ( validateGQ
+ , GraphQLRequest
+ , runGQ
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text as T
+import qualified Data.Vector as V
+import qualified Database.PG.Query as Q
+import qualified Language.GraphQL.Draft.Parser as G
+import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Hasura.Server.Query as RQ
+
+import Hasura.GraphQL.Execute.Result
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.Introspect
+import Hasura.GraphQL.Schema
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Hasura.GraphQL.Resolve.Mutation as RM
+import qualified Hasura.GraphQL.Resolve.Select as RS
+
+newtype GraphQLQuery
+ = GraphQLQuery { unGraphQLQuery :: [G.ExecutableDefinition] }
+ deriving (Show, Eq)
+
+instance J.FromJSON GraphQLQuery where
+ parseJSON = J.withText "GraphQLQuery" $ \t ->
+ case G.parseExecutableDoc t of
+ Left _ -> fail "parsing the graphql query failed"
+ Right q -> return $ GraphQLQuery $ G.getExecutableDefinitions q
+
+newtype OperationName
+ = OperationName { _unOperationName :: G.Name }
+ deriving (Show, Eq)
+
+instance J.FromJSON OperationName where
+ parseJSON v = OperationName . G.Name <$> J.parseJSON v
+
+type VariableValues = Map.HashMap G.Variable J.Value
+
+data GraphQLRequest
+ = GraphQLRequest
+ { _grOperationName :: !(Maybe OperationName)
+ , _grQuery :: !GraphQLQuery
+ , _grVariables :: !(Maybe VariableValues)
+ } deriving (Show, Eq)
+
+$(J.deriveFromJSON (J.aesonDrop 3 J.camelCase){J.omitNothingFields=True}
+ ''GraphQLRequest
+ )
+
+getTypedOp
+ :: (MonadError QErr m)
+ => Maybe OperationName
+ -> [G.SelectionSet]
+ -> [G.TypedOperationDefinition]
+ -> m G.TypedOperationDefinition
+getTypedOp opNameM selSets opDefs =
+ case (opNameM, selSets, opDefs) of
+ (Just opName, [], _) -> do
+ let n = _unOperationName opName
+ opDefM = find (\opDef -> G._todName opDef == Just n) opDefs
+ onNothing opDefM $ throwVE $
+ "no such operation found in the document: " <> showName n
+ (Just _, _, _) ->
+ throwVE $ "operationName cannot be used when " <>
+ "an anonymous operation exists in the document"
+ (Nothing, [selSet], []) ->
+ return $ G.TypedOperationDefinition
+ G.OperationTypeQuery Nothing [] [] selSet
+ (Nothing, [], [opDef]) ->
+ return opDef
+ (Nothing, _, _) ->
+ throwVE $ "exactly one operation has to be present " <>
+ "in the document when operationName is not specified"
+
+-- For every variable defined, there will be a value in the final map.
+-- If there's no default and no supplied value, a nullable variable maps to null.
+getAnnVarVals
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m
+ )
+ => [G.VariableDefinition]
+ -> VariableValues
+ -> m AnnVarVals
+getAnnVarVals varDefsL inpVals = do
+
+ varDefs <- onLeft (mkMapWith G._vdVariable varDefsL) $ \dups ->
+ throwVE $ "the following variables are defined more than once: " <>
+ showVars dups
+
+ let unexpectedVars = filter (not . (`Map.member` varDefs)) $ Map.keys inpVals
+
+ unless (null unexpectedVars) $
+ throwVE $ "unexpected variables in variableValues: " <>
+ showVars unexpectedVars
+
+ forM varDefs $ \(G.VariableDefinition var ty defM) -> do
+ let baseTy = getBaseTy ty
+ baseTyInfo <- getTyInfoVE baseTy
+ -- check that the variable is defined on input types
+ when (isObjTy baseTyInfo) $ throwVE $ objTyErrMsg baseTy
+
+ let defM' = bool (defM <|> Just G.VCNull) defM $ G.isNotNull ty
+ annDefM <- withPathK "defaultValue" $
+ mapM (validateInputValue constValueParser ty) defM'
+ let inpValM = Map.lookup var inpVals
+ annInpValM <- withPathK "variableValues" $
+ mapM (validateInputValue jsonParser ty) inpValM
+ let varValM = annInpValM <|> annDefM
+ onNothing varValM $ throwVE $ "expecting a value for non-null type: "
+ <> G.showGT ty <> " in variableValues"
+ where
+ objTyErrMsg namedTy =
+ "variables can only be defined on input types"
+    <> " (enums, scalars, input objects), but "
+ <> showNamedTy namedTy <> " is an object type"
+
+ showVars :: (Functor f, Foldable f) => f G.Variable -> Text
+ showVars = showNames . fmap G.unVariable
+
+validateFrag
+ :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
+ => G.FragmentDefinition -> m FragDef
+validateFrag (G.FragmentDefinition n onTy dirs selSet) = do
+ unless (null dirs) $ throwVE
+ "unexpected directives at fragment definition"
+ tyInfo <- getTyInfoVE onTy
+ objTyInfo <- onNothing (getObjTyM tyInfo) $ throwVE
+ "fragments can only be defined on object types"
+ return $ FragDef n objTyInfo selSet
+
+{-# SCC validateGQ #-}
+validateGQ
+ :: (MonadError QErr m, MonadReader GCtx m)
+ => GraphQLRequest
+ -> m SelSet
+validateGQ (GraphQLRequest opNameM q varValsM) = do
+
+ -- get the operation that needs to be evaluated
+ opDef <- getTypedOp opNameM selSets opDefs
+
+ ctx <- ask
+ -- get the operation root
+ opRoot <- case G._todType opDef of
+ G.OperationTypeQuery -> return $ _gQueryRoot ctx
+ G.OperationTypeMutation -> onNothing (_gMutRoot ctx) $ throwVE
+ "no mutations exist"
+ _ -> throwVE "subscriptions are not supported"
+
+ -- annotate the variables of this operation
+ annVarVals <- getAnnVarVals (G._todVariableDefinitions opDef) $
+ fromMaybe Map.empty varValsM
+
+ -- annotate the fragments
+ fragDefs <- onLeft (mkMapWith G._fdName fragDefsL) $ \dups ->
+ throwVE $ "the following fragments are defined more than once: " <>
+ showNames dups
+ annFragDefs <- mapM validateFrag fragDefs
+
+ -- build a validation ctx
+ let valCtx = ValidationCtx (_gTypes ctx) annVarVals annFragDefs
+
+ flip runReaderT valCtx $ denormSelSet [] opRoot $ G._todSelectionSet opDef
+
+ where
+ (selSets, opDefs, fragDefsL) = G.partitionExDefs $ unGraphQLQuery q
+
+{-# SCC buildTx #-}
+buildTx :: UserInfo -> GCtx -> Field -> Q.TxE QErr BL.ByteString
+buildTx userInfo gCtx fld = do
+ opCxt <- getOpCtx $ _fName fld
+ tx <- fmap fst $ runConvert (fldMap, orderByCtx) $ case opCxt of
+
+ OCSelect tn permFilter hdrs ->
+ validateHdrs hdrs >> RS.convertSelect tn permFilter fld
+ OCInsert tn vn cols hdrs ->
+ validateHdrs hdrs >> RM.convertInsert (tn, vn) cols fld
+ OCUpdate tn permFilter hdrs ->
+ validateHdrs hdrs >> RM.convertUpdate tn permFilter fld
+ OCDelete tn permFilter hdrs ->
+ validateHdrs hdrs >> RM.convertDelete tn permFilter fld
+ tx
+ where
+ opCtxMap = _gOpCtxMap gCtx
+ fldMap = _gFields gCtx
+ orderByCtx = _gOrdByEnums gCtx
+
+ getOpCtx f =
+ onNothing (Map.lookup f opCtxMap) $ throw500 $
+ "lookup failed: opctx: " <> showName f
+
+ validateHdrs hdrs = do
+ let receivedHdrs = map fst $ userHeaders userInfo
+ forM_ hdrs $ \hdr ->
+ unless (hdr `elem` map T.toLower receivedHdrs) $
+ throw400 NotFound $ hdr <<> " header is expected but not found"
+
+{-# SCC resolveFld #-}
+resolveFld
+ :: (MonadIO m, MonadError QErr m)
+ => Q.PGPool -> Q.TxIsolation
+ -> UserInfo -> GCtx
+ -> Field -> m BL.ByteString
+resolveFld pool isoL userInfo gCtx fld =
+ case _fName fld of
+ "__type" -> J.encode <$> runReaderT (typeR fld) gCtx
+ "__schema" -> J.encode <$> runReaderT (schemaR fld) gCtx
+ "__typename" -> return $ J.encode J.Null
+ _ -> runTx $ buildTx userInfo gCtx fld
+ where
+ runTx tx =
+ Q.runTx pool (isoL, Nothing) $
+ RQ.setHeadersTx userInfo >> tx
+
+runGQ
+ :: (MonadIO m, MonadError QErr m)
+ => Q.PGPool -> Q.TxIsolation
+ -> UserInfo -> GCtxMap
+ -> GraphQLRequest
+ -> m BL.ByteString
+runGQ pool isoL userInfo gCtxMap req = do
+ fields <- runReaderT (validateGQ req) gCtx
+ -- putLText $ J.encodeToLazyText $ J.toJSON fields
+ respFlds <- fmap V.fromList $ forM (toList fields) $ \fld -> do
+ fldResp <- resolveFld pool isoL userInfo gCtx fld
+ return (G.unName $ G.unAlias $ _fAlias fld, fldResp)
+ return $ encodeGQResp $ GQSuccess $ mkJSONObj respFlds
+ where
+ gCtx = getGCtx (userRole userInfo) gCtxMap
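One subtlety in `getAnnVarVals` above deserves a note: a value supplied in `variableValues` beats the declared default, and a nullable variable with neither degrades to an explicit null. A toy reduction of that precedence chain (`resolveVar` is a hypothetical name, with `String` standing in for annotated values):

```haskell
import Control.Applicative ((<|>))

-- Toy model of the defM'/varValM chain in getAnnVarVals.
resolveVar :: Bool -> Maybe String -> Maybe String -> Either String String
resolveVar isNonNull defVal given =
  let def' = if isNonNull then defVal else defVal <|> Just "null"
  in maybe (Left "expecting a value for non-null type") Right (given <|> def')

main :: IO ()
main = mapM_ print
  [ resolveVar True  Nothing    (Just "42") -- Right "42": supplied value wins
  , resolveVar True  (Just "7") Nothing     -- Right "7": default kicks in
  , resolveVar False Nothing    Nothing     -- Right "null": nullable fallback
  , resolveVar True  Nothing    Nothing     -- Left: non-null with no value at all
  ]
```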
diff --git a/server/src-lib/Hasura/GraphQL/Execute/Result.hs b/server/src-lib/Hasura/GraphQL/Execute/Result.hs
new file mode 100644
index 00000000..fe5624e9
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Execute/Result.hs
@@ -0,0 +1,54 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Execute.Result
+ ( encodeGQErr
+ , encodeJSONObject
+ , encodeGQResp
+ , mkJSONObj
+ , GQResp(..)
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.ByteString.Builder as BB
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Text.Encoding as TE
+import qualified Data.Vector as V
+
+import Hasura.RQL.Types
+
+encodeGQErr :: Text -> QErr -> J.Value
+encodeGQErr role qErr =
+ J.object [ "errors" J..= [encodeQErr role qErr]]
+
+data GQResp
+ = GQSuccess BL.ByteString
+ | GQPreExecError [J.Value]
+ | GQExecError [J.Value]
+ deriving (Show, Eq)
+
+encodeJSONObject :: V.Vector (Text, BL.ByteString) -> BB.Builder
+encodeJSONObject xs
+ | V.null xs = BB.char7 '{' <> BB.char7 '}'
+ | otherwise = BB.char7 '{' <> builder' (V.unsafeHead xs) <>
+ V.foldr go (BB.char7 '}') (V.unsafeTail xs)
+ where
+ go v b = BB.char7 ',' <> builder' v <> b
+ -- builds "key":value from (key,value)
+ builder' (t, v) =
+ BB.char7 '"' <> TE.encodeUtf8Builder t <> BB.string7 "\":"
+ <> BB.lazyByteString v
+
+encodeGQResp :: GQResp -> BL.ByteString
+encodeGQResp gqResp =
+ buildBS $ case gqResp of
+ GQSuccess r -> V.singleton ("data", r)
+ GQPreExecError e -> V.singleton ("errors", J.encode e)
+ GQExecError e -> V.fromList [("data", "null"), ("errors", J.encode e)]
+ where
+ buildBS = BB.toLazyByteString . encodeJSONObject
+
+mkJSONObj :: V.Vector (Text, BL.ByteString) -> BL.ByteString
+mkJSONObj = BB.toLazyByteString . encodeJSONObject
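The point of `encodeJSONObject` is that each field's response arrives as an already-encoded `ByteString` and is spliced into the final object byte-for-byte, with no intermediate re-parse. A self-contained copy showing the output shape:

```haskell
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString.Builder as BB
import qualified Data.ByteString.Lazy.Char8 as BLC
import Data.Text (Text)
import qualified Data.Text.Encoding as TE
import qualified Data.Vector as V

encodeJSONObject :: V.Vector (Text, BLC.ByteString) -> BB.Builder
encodeJSONObject xs
  | V.null xs = BB.string7 "{}"
  | otherwise = BB.char7 '{' <> pair (V.unsafeHead xs) <>
                V.foldr (\p b -> BB.char7 ',' <> pair p <> b) (BB.char7 '}') (V.unsafeTail xs)
  where
    -- builds "key":value, trusting that the value is already valid JSON
    pair (k, v) =
      BB.char7 '"' <> TE.encodeUtf8Builder k <> BB.string7 "\":" <> BB.lazyByteString v

main :: IO ()
main = BLC.putStrLn $ BB.toLazyByteString $
  encodeJSONObject $ V.fromList [("data", "{\"albums\":[]}"), ("errors", "null")]
-- prints: {"data":{"albums":[]},"errors":null}
```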
diff --git a/server/src-lib/Hasura/GraphQL/NonEmptySeq.hs b/server/src-lib/Hasura/GraphQL/NonEmptySeq.hs
new file mode 100644
index 00000000..27f3a749
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/NonEmptySeq.hs
@@ -0,0 +1,35 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+
+module Hasura.GraphQL.NonEmptySeq
+ ( NESeq
+ , (<|)
+ , (|>)
+ , init
+ , head
+ , toSeq
+ ) where
+
+import qualified Data.Sequence as Seq
+import Hasura.Prelude hiding (head)
+
+infixr 5 <|
+infixl 5 |>
+
+newtype NESeq a
+ = NESeq { unNESeq :: (a, Seq.Seq a)}
+ deriving (Show, Eq)
+
+init :: a -> NESeq a
+init a = NESeq (a, Seq.empty)
+
+head :: NESeq a -> a
+head = fst . unNESeq
+
+(|>) :: NESeq a -> a -> NESeq a
+(NESeq (h, l)) |> v = NESeq (h, l Seq.|> v)
+
+(<|) :: a -> NESeq a -> NESeq a
+v <| (NESeq (h, l)) = NESeq (v, h Seq.<| l)
+
+toSeq :: NESeq a -> Seq.Seq a
+toSeq (NESeq (v, l)) = v Seq.<| l
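`NESeq` simply pins a guaranteed first element next to an ordinary `Seq`. A compile-and-run sketch of the API (re-declared inline; `initNE` plays the module's `init`, which clashes with `Prelude.init` and is therefore hidden there):

```haskell
import Data.Foldable (toList)
import qualified Data.Sequence as Seq

newtype NESeq a = NESeq (a, Seq.Seq a)

initNE :: a -> NESeq a
initNE a = NESeq (a, Seq.empty)

(|>.) :: NESeq a -> a -> NESeq a
NESeq (h, l) |>. v = NESeq (h, l Seq.|> v)

toSeq :: NESeq a -> Seq.Seq a
toSeq (NESeq (v, l)) = v Seq.<| l

main :: IO ()
main = print $ toList $ toSeq $ initNE (1 :: Int) |>. 2 |>. 3 -- [1,2,3]
```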
diff --git a/server/src-lib/Hasura/GraphQL/OrderedMap.hs b/server/src-lib/Hasura/GraphQL/OrderedMap.hs
new file mode 100644
index 00000000..2c03fdce
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/OrderedMap.hs
@@ -0,0 +1,83 @@
+{-# LANGUAGE NoImplicitPrelude #-}
+
+-- TODO: use insert-ordered-containers
+
+module Hasura.GraphQL.OrderedMap
+ ( empty
+ , elems
+ , insert
+ , lookup
+ , toList
+ , OMap
+ , groupTuples
+ , groupListWith
+ ) where
+
+import qualified Data.HashMap.Strict as Map
+import Hasura.Prelude hiding (toList)
+
+import qualified Hasura.GraphQL.NonEmptySeq as NE
+
+newtype OVal v =
+ OVal { _unOVal :: (Int, v) }
+ deriving (Show)
+
+getI :: OVal v -> Int
+getI = fst . _unOVal
+
+getV :: OVal v -> v
+getV = snd . _unOVal
+
+setV :: (v -> v) -> OVal v -> OVal v
+setV f (OVal (i, v)) = OVal (i, f v)
+
+newtype OMap k v =
+ OMap { _unOMap :: (Int, Map.HashMap k (OVal v)) }
+ deriving (Show)
+
+empty :: OMap k v
+empty = OMap (0, Map.empty)
+
+insert :: (Eq k, Hashable k) => k -> v -> OMap k v -> OMap k v
+insert k v (OMap (index, m)) =
+ OMap (index + 1, Map.insert k (OVal (index, v)) m)
+
+insertWith
+ :: (Eq k, Hashable k)
+ => (Maybe v -> v) -> k -> OMap k v -> OMap k v
+insertWith f k oMap@(OMap (index, m)) =
+ case Map.lookup k m of
+ Just ov ->
+ let newVal = setV (f . Just) ov
+ in OMap (index, Map.insert k newVal m)
+ Nothing ->
+ insert k (f Nothing) oMap
+
+toList :: OMap k v -> [(k, v)]
+toList (OMap (_, m)) =
+ [ (k, getV ov) | (k, ov) <- orderedList]
+ where
+ orderedList =
+ sortBy (comparing (getI . snd)) $ Map.toList m
+
+elems :: OMap k v -> [v]
+elems = map snd . toList
+
+lookup :: (Hashable k, Eq k) => k -> OMap k a -> Maybe a
+lookup k (OMap (_, m)) =
+ getV <$> Map.lookup k m
+
+groupTuples
+ :: (Eq k, Hashable k, Foldable t)
+ => t (k, v) -> OMap k (NE.NESeq v)
+groupTuples =
+ foldl' groupFlds empty
+ where
+ groupFlds m (k, v) =
+ insertWith ( maybe (NE.init v) (NE.|> v) ) k m
+
+groupListWith
+ :: (Eq k, Hashable k, Foldable t, Functor t)
+ => (v -> k) -> t v -> OMap k (NE.NESeq v)
+groupListWith f l =
+ groupTuples $ fmap (\v -> (f v, v)) l
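What `OMap` buys over a plain `HashMap` is stable ordering: `groupTuples`/`groupListWith` keep keys in first-seen order and values in arrival order, which is what keeps response fields in the order the query asked for them. The contract stated over plain lists (a simplification, not the module's implementation):

```haskell
import Data.List (nub)

-- Order-preserving grouping: keys in first-seen order, values in arrival order.
groupListWith :: Eq k => (v -> k) -> [v] -> [(k, [v])]
groupListWith f vs = [ (k, [v | v <- vs, f v == k]) | k <- nub (map f vs) ]

main :: IO ()
main = print $ groupListWith head ["apple", "avocado", "banana", "apricot"]
-- prints: [('a',["apple","avocado","apricot"]),('b',["banana"])]
```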
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs
new file mode 100644
index 00000000..30321acc
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/BoolExp.hs
@@ -0,0 +1,97 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Resolve.BoolExp
+ ( parseBoolExp
+ , convertBoolExp
+ , prepare
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import qualified Hasura.RQL.GBoolExp as RA
+import qualified Hasura.RQL.GBoolExp as RG
+import qualified Hasura.SQL.DML as S
+
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.InputValue
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+
+import Hasura.SQL.Types
+
+parseOpExps
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => AnnGValue -> m [RA.OpExp]
+parseOpExps annVal = do
+ opExpsM <- flip withObjectM annVal $ \nt objM -> forM objM $ \obj ->
+ forM (Map.toList obj) $ \(k, v) -> case k of
+ "_eq" -> fmap RA.AEQ <$> asPGColValM v
+ "_ne" -> fmap RA.ANE <$> asPGColValM v
+ "_neq" -> fmap RA.ANE <$> asPGColValM v
+
+ "_in" -> fmap (RA.AIN . catMaybes) <$> parseMany asPGColValM v
+ "_nin" -> fmap (RA.ANIN . catMaybes) <$> parseMany asPGColValM v
+
+ "_gt" -> fmap RA.AGT <$> asPGColValM v
+ "_lt" -> fmap RA.ALT <$> asPGColValM v
+ "_gte" -> fmap RA.AGTE <$> asPGColValM v
+ "_lte" -> fmap RA.ALTE <$> asPGColValM v
+
+ "_like" -> fmap RA.ALIKE <$> asPGColValM v
+ "_nlike" -> fmap RA.ANLIKE <$> asPGColValM v
+
+ "_ilike" -> fmap RA.AILIKE <$> asPGColValM v
+ "_nilike" -> fmap RA.ANILIKE <$> asPGColValM v
+
+ "_similar" -> fmap RA.ASIMILAR <$> asPGColValM v
+ "_nsimilar" -> fmap RA.ANSIMILAR <$> asPGColValM v
+ _ ->
+ throw500
+ $ "unexpected operator found in opexp of "
+ <> showNamedTy nt
+ <> ": "
+ <> showName k
+ return $ map RA.OEVal $ catMaybes $ fromMaybe [] opExpsM
+
+parseColExp
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => G.NamedType
+ -> G.Name
+ -> AnnGValue
+ -> m RA.AnnVal
+parseColExp nt n val = do
+ fldInfo <- getFldInfo nt n
+ case fldInfo of
+ Left pgColInfo -> RA.AVCol pgColInfo <$> parseOpExps val
+ Right (relInfo, permExp) -> do
+ relBoolExp <- parseBoolExp val
+ return $ RA.AVRel relInfo relBoolExp permExp
+
+parseBoolExp
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => AnnGValue
+ -> m (GBoolExp RA.AnnVal)
+parseBoolExp annGVal = do
+ boolExpsM <-
+ flip withObjectM annGVal
+ $ \nt objM -> forM objM $ \obj -> forM (Map.toList obj) $ \(k, v) -> if
+ | k == "_or" -> BoolOr . fromMaybe [] <$> parseMany parseBoolExp v
+ | k == "_and" -> BoolAnd . fromMaybe [] <$> parseMany parseBoolExp v
+ | k == "_not" -> BoolNot <$> parseBoolExp v
+ | otherwise -> BoolCol <$> parseColExp nt k v
+ return $ BoolAnd $ fromMaybe [] boolExpsM
+
+convertBoolExp
+ :: QualifiedTable
+ -> AnnGValue
+ -> Convert (GBoolExp RG.AnnSQLBoolExp)
+convertBoolExp tn whereArg = do
+ whereExp <- parseBoolExp whereArg
+ RG.convBoolRhs (RG.mkBoolExpBuilder prepare) (S.mkQual tn) whereExp
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Context.hs b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs
new file mode 100644
index 00000000..3bc71c68
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Context.hs
@@ -0,0 +1,129 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Resolve.Context
+ ( FieldMap
+ , OrdByResolveCtx
+ , OrdByResolveCtxElem
+ , NullsOrder(..)
+ , OrdTy(..)
+ , RespTx
+ , getFldInfo
+ , getPGColInfo
+ , getArg
+ , withArg
+ , withArgM
+ , PrepArgs
+ , Convert
+ , runConvert
+ , prepare
+ , module Hasura.GraphQL.Utils
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Sequence as Seq
+import qualified Database.PG.Query as Q
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Utils
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+import qualified Hasura.SQL.DML as S
+
+type FieldMap
+ = Map.HashMap (G.NamedType, G.Name) (Either PGColInfo (RelInfo, S.BoolExp))
+
+data OrdTy
+ = OAsc
+ | ODesc
+ deriving (Show, Eq)
+
+data NullsOrder
+ = NFirst
+ | NLast
+ deriving (Show, Eq)
+
+type RespTx = Q.TxE QErr BL.ByteString
+
+-- context needed for sql generation
+type OrdByResolveCtxElem = (PGColInfo, OrdTy, NullsOrder)
+
+type OrdByResolveCtx
+ = Map.HashMap (G.NamedType, G.EnumValue) OrdByResolveCtxElem
+
+getFldInfo
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => G.NamedType -> G.Name -> m (Either PGColInfo (RelInfo, S.BoolExp))
+getFldInfo nt n = do
+ fldMap <- asks getter
+ onNothing (Map.lookup (nt,n) fldMap) $
+ throw500 $ "could not lookup " <> showName n <> " in " <>
+ showNamedTy nt
+
+getPGColInfo
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => G.NamedType -> G.Name -> m PGColInfo
+getPGColInfo nt n = do
+ fldInfo <- getFldInfo nt n
+ case fldInfo of
+ Left pgColInfo -> return pgColInfo
+ Right _ -> throw500 $
+ "found relinfo when expecting pgcolinfo for "
+ <> showNamedTy nt <> ":" <> showName n
+
+getArg
+ :: (MonadError QErr m)
+ => ArgsMap
+ -> G.Name
+ -> m AnnGValue
+getArg args arg =
+ onNothing (Map.lookup arg args) $
+ throw500 $ "missing argument: " <> showName arg
+
+withArg
+ :: (MonadError QErr m)
+ => ArgsMap
+ -> G.Name
+ -> (AnnGValue -> m a)
+ -> m a
+withArg args arg f =
+ getArg args arg >>= f
+
+withArgM
+ :: (MonadError QErr m)
+ => ArgsMap
+ -> G.Name
+ -> (AnnGValue -> m a)
+ -> m (Maybe a)
+withArgM args arg f =
+ mapM f $ Map.lookup arg args
+
+type PrepArgs = Seq.Seq Q.PrepArg
+
+type Convert =
+ StateT PrepArgs (ReaderT (FieldMap, OrdByResolveCtx) (Except QErr))
+
+prepare
+ :: (MonadState PrepArgs m)
+ => (PGColType, PGColValue) -> m S.SQLExp
+prepare (colTy, colVal) = do
+ preparedArgs <- get
+ put (preparedArgs Seq.|> binEncoder colVal)
+ return $ toPrepParam (Seq.length preparedArgs + 1) colTy
+
+runConvert
+ :: (MonadError QErr m)
+ => (FieldMap, OrdByResolveCtx) -> Convert a -> m (a, PrepArgs)
+runConvert ctx m =
+ either throwError return $
+ runExcept $ runReaderT (runStateT m Seq.empty) ctx
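`prepare` is the parameterisation step that keeps user-supplied values out of the SQL text: each value is appended to the `PrepArgs` sequence and replaced by a 1-indexed positional placeholder. A standalone reduction with placeholders as plain `String`s rather than `S.SQLExp` (needs mtl):

```haskell
import Control.Monad.State
import Data.Foldable (toList)
import qualified Data.Sequence as Seq

type PrepArgs = Seq.Seq String

-- Push the value, return the placeholder it will occupy ($1, $2, ...).
prepare :: MonadState PrepArgs m => String -> m String
prepare v = do
  args <- get
  put (args Seq.|> v)
  return ('$' : show (Seq.length args + 1))

main :: IO ()
main = do
  let (placeholders, args) = runState (mapM prepare ["Queen", "1973"]) Seq.empty
  print placeholders  -- ["$1","$2"]
  print (toList args) -- ["Queen","1973"]
```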
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs
new file mode 100644
index 00000000..f387306c
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/InputValue.hs
@@ -0,0 +1,110 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TupleSections #-}
+
+module Hasura.GraphQL.Resolve.InputValue
+ ( withNotNull
+ , tyMismatch
+ , asPGColValM
+ , asPGColVal
+ , asEnumVal
+ , withObject
+ , withObjectM
+ , withArray
+ , withArrayM
+ , parseMany
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Utils
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+withNotNull
+ :: (MonadError QErr m)
+ => G.NamedType -> Maybe a -> m a
+withNotNull nt v =
+ onNothing v $ throw500 $
+ "unexpected null for a value of type " <> showNamedTy nt
+
+tyMismatch
+ :: (MonadError QErr m) => Text -> AnnGValue -> m a
+tyMismatch expectedTy v =
+ throw500 $ "expected " <> expectedTy <> ", found " <>
+ getAnnInpValKind v <> " for value of type " <>
+ G.showGT (getAnnInpValTy v)
+
+asPGColValM
+ :: (MonadError QErr m)
+ => AnnGValue -> m (Maybe (PGColType, PGColValue))
+asPGColValM = \case
+ AGScalar colTy valM -> return $ fmap (colTy,) valM
+ v -> tyMismatch "pgvalue" v
+
+asPGColVal
+ :: (MonadError QErr m)
+ => AnnGValue -> m (PGColType, PGColValue)
+asPGColVal = \case
+ AGScalar colTy (Just val) -> return (colTy, val)
+ AGScalar colTy Nothing ->
+    throw500 $ "unexpected null for ty "
+ <> T.pack (show colTy)
+ v -> tyMismatch "pgvalue" v
+
+asEnumVal
+ :: (MonadError QErr m)
+ => AnnGValue -> m (G.NamedType, G.EnumValue)
+asEnumVal = \case
+ AGEnum ty (Just val) -> return (ty, val)
+ AGEnum ty Nothing ->
+ throw500 $ "unexpected null for ty " <> showNamedTy ty
+ v -> tyMismatch "enum" v
+
+withObject
+ :: (MonadError QErr m)
+ => (G.NamedType -> AnnGObject -> m a) -> AnnGValue -> m a
+withObject fn v = case v of
+ AGObject nt (Just obj) -> fn nt obj
+ AGObject nt Nothing ->
+    throw500 $ "unexpected null for ty "
+ <> G.showGT (G.TypeNamed nt)
+ _ -> tyMismatch "object" v
+
+withObjectM
+ :: (MonadError QErr m)
+ => (G.NamedType -> Maybe AnnGObject -> m a) -> AnnGValue -> m a
+withObjectM fn v = case v of
+ AGObject nt objM -> fn nt objM
+ _ -> tyMismatch "object" v
+
+withArrayM
+ :: (MonadError QErr m)
+ => (G.ListType -> Maybe [AnnGValue] -> m a) -> AnnGValue -> m a
+withArrayM fn v = case v of
+ AGArray lt listM -> fn lt listM
+ _ -> tyMismatch "array" v
+
+withArray
+ :: (MonadError QErr m)
+ => (G.ListType -> [AnnGValue] -> m a) -> AnnGValue -> m a
+withArray fn v = case v of
+ AGArray lt (Just l) -> fn lt l
+  AGArray lt Nothing -> throw500 $ "unexpected null for ty "
+ <> G.showGT (G.TypeList lt)
+ _ -> tyMismatch "array" v
+
+parseMany
+ :: (MonadError QErr m)
+ => (AnnGValue -> m a) -> AnnGValue -> m (Maybe [a])
+parseMany fn v = case v of
+ AGArray _ arrM -> mapM (mapM fn) arrM
+ _ -> tyMismatch "array" v
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
new file mode 100644
index 00000000..9c6b1f7f
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Introspect.hs
@@ -0,0 +1,302 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+module Hasura.GraphQL.Resolve.Introspect
+ ( schemaR
+ , typeR
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Text as T
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.InputValue
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.Types
+
+import Hasura.RQL.Types
+import Hasura.SQL.Value
+
+data TypeKind
+ = TKSCALAR
+ | TKOBJECT
+ | TKINTERFACE
+ | TKUNION
+ | TKENUM
+ | TKINPUT_OBJECT
+ | TKLIST
+ | TKNON_NULL
+ deriving (Show, Eq)
+
+instance J.ToJSON TypeKind where
+ toJSON = J.toJSON . T.pack . drop 2 . show
+
+withSubFields
+ :: (Monad m)
+ => SelSet
+ -> (Field -> m J.Value)
+ -> m J.Object
+withSubFields selSet fn =
+ fmap Map.fromList $ forM (toList selSet) $ \fld -> do
+ val <- fn fld
+ return (G.unName $ G.unAlias $ _fAlias fld, val)
+
+namedTyToTxt :: G.NamedType -> Text
+namedTyToTxt = G.unName . G.unNamedType
+
+retJ :: (Applicative m, J.ToJSON a) => a -> m J.Value
+retJ = pure . J.toJSON
+
+retJT :: (Applicative m) => Text -> m J.Value
+retJT = pure . J.toJSON
+
+-- 4.5.2.1
+scalarR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => ScalarTyInfo
+ -> Field
+ -> m J.Object
+scalarR (ScalarTyInfo descM pgColType) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKSCALAR
+ "description" -> retJ $ fmap G.unDescription descM
+ "name" -> retJ $ pgColTyToScalar pgColType
+ _ -> return J.Null
+
+-- 4.5.2.2
+objectTypeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => ObjTyInfo
+ -> Field
+ -> m J.Object
+objectTypeR (ObjTyInfo descM n flds) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKOBJECT
+ "name" -> retJ $ namedTyToTxt n
+ "description" -> retJ $ fmap G.unDescription descM
+ "interfaces" -> retJ ([] :: [()])
+ "fields" -> fmap J.toJSON $ mapM (`fieldR` subFld) $
+ sortBy (comparing _fiName) $
+ filter notBuiltinFld $ Map.elems flds
+ _ -> return J.Null
+
+notBuiltinFld :: ObjFldInfo -> Bool
+notBuiltinFld f =
+ fldName /= "__typename" && fldName /= "__type" && fldName /= "__schema"
+ where
+ fldName = _fiName f
+
+-- 4.5.2.5
+enumTypeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => EnumTyInfo
+ -> Field
+ -> m J.Object
+enumTypeR (EnumTyInfo descM n vals) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKENUM
+ "name" -> retJ $ namedTyToTxt n
+ "description" -> retJ $ fmap G.unDescription descM
+ "enumValues" -> fmap J.toJSON $ mapM (enumValueR subFld) $
+ sortBy (comparing _eviVal) $ Map.elems vals
+ _ -> return J.Null
+
+-- 4.5.2.6
+inputObjR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => InpObjTyInfo
+ -> Field
+ -> m J.Object
+inputObjR (InpObjTyInfo descM nt flds) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKINPUT_OBJECT
+ "name" -> retJ $ namedTyToTxt nt
+ "description" -> retJ $ fmap G.unDescription descM
+ "inputFields" -> fmap J.toJSON $ mapM (inputValueR subFld) $
+ sortBy (comparing _iviName) $ Map.elems flds
+ _ -> return J.Null
+
+-- 4.5.2.7
+listTypeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => G.ListType -> Field -> m J.Object
+listTypeR (G.ListType ty) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKLIST
+ "ofType" -> J.toJSON <$> gtypeR ty subFld
+ _ -> return J.Null
+
+-- 4.5.2.8
+nonNullR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => G.NonNullType -> Field -> m J.Object
+nonNullR nnt fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Type"
+ "kind" -> retJ TKNON_NULL
+ "ofType" -> case nnt of
+ G.NonNullTypeNamed nt -> J.toJSON <$> namedTypeR nt subFld
+ G.NonNullTypeList lt -> J.toJSON <$> listTypeR lt subFld
+ _ -> return J.Null
+
+namedTypeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => G.NamedType
+ -> Field
+ -> m J.Object
+namedTypeR nt fld = do
+ tyInfo <- getTyInfo nt
+ namedTypeR' fld tyInfo
+
+namedTypeR'
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field
+ -> TypeInfo
+ -> m J.Object
+namedTypeR' fld = \case
+ TIScalar colTy -> scalarR colTy fld
+ TIObj objTyInfo -> objectTypeR objTyInfo fld
+ TIEnum enumTypeInfo -> enumTypeR enumTypeInfo fld
+ TIInpObj inpObjTyInfo -> inputObjR inpObjTyInfo fld
+
+-- 4.5.3
+fieldR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => ObjFldInfo -> Field -> m J.Object
+fieldR (ObjFldInfo descM n params ty) fld =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Field"
+ "name" -> retJ $ G.unName n
+ "description" -> retJ $ fmap G.unDescription descM
+ "args" -> fmap J.toJSON $ mapM (inputValueR subFld) $
+ sortBy (comparing _iviName) $ Map.elems params
+ "type" -> J.toJSON <$> gtypeR ty subFld
+ "isDeprecated" -> retJ False
+ _ -> return J.Null
+
+-- 4.5.4
+inputValueR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field -> InpValInfo -> m J.Object
+inputValueR fld (InpValInfo descM n ty) =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__InputValue"
+ "name" -> retJ $ G.unName n
+ "description" -> retJ $ fmap G.unDescription descM
+ "type" -> J.toJSON <$> gtypeR ty subFld
+ -- TODO: figure out what the spec means by 'string encoding'
+ "defaultValue" -> return J.Null
+ _ -> return J.Null
+
+-- 4.5.5
+enumValueR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field -> EnumValInfo -> m J.Object
+enumValueR fld (EnumValInfo descM enumVal isDeprecated) =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__EnumValue"
+ "name" -> retJ $ G.unName $ G.unEnumValue enumVal
+ "description" -> retJ $ fmap G.unDescription descM
+ "isDeprecated" -> retJ isDeprecated
+ _ -> return J.Null
+
+-- 4.5.6
+directiveR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field -> DirectiveInfo -> m J.Object
+directiveR fld (DirectiveInfo descM n args locs) =
+ withSubFields (_fSelSet fld) $ \subFld ->
+ case _fName subFld of
+ "__typename" -> retJT "__Directive"
+ "name" -> retJ $ G.unName n
+ "description" -> retJ $ fmap G.unDescription descM
+ "locations" -> retJ $ map showDirLoc locs
+ "args" -> fmap J.toJSON $ mapM (inputValueR subFld) $
+ sortBy (comparing _iviName) $ Map.elems args
+ _ -> return J.Null
+
+showDirLoc :: G.DirectiveLocation -> Text
+showDirLoc = \case
+ G.DLExecutable edl -> T.pack $ drop 3 $ show edl
+ G.DLTypeSystem tsdl -> T.pack $ drop 4 $ show tsdl
+
+gtypeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => G.GType -> Field -> m J.Object
+gtypeR ty fld =
+ case ty of
+ G.TypeList lt -> listTypeR lt fld
+ G.TypeNonNull nnt -> nonNullR nnt fld
+ G.TypeNamed nt -> namedTypeR nt fld
+
+schemaR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field -> m J.Object
+schemaR fld =
+ withSubFields (_fSelSet fld) $ \subFld -> do
+ (tyMap :: TypeMap) <- asks getter
+ case _fName subFld of
+ "__typename" -> retJT "__Schema"
+ "types" -> fmap J.toJSON $ mapM (namedTypeR' subFld) $
+ sortBy (comparing getNamedTy) $ Map.elems tyMap
+ "queryType" -> J.toJSON <$> namedTypeR (G.NamedType "query_root") subFld
+ "mutationType" -> J.toJSON <$> namedTypeR (G.NamedType "mutation_root") subFld
+ "directives" -> J.toJSON <$> mapM (directiveR subFld)
+ (sortBy (comparing _diName) defaultDirectives)
+ _ -> return J.Null
+
+typeR
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => Field -> m J.Value
+typeR fld = do
+ tyMap <- asks getter
+ name <- withArg args "name" $ \arg -> do
+ (_, pgColVal) <- asPGColVal arg
+ case pgColVal of
+ PGValText t -> return t
+ _ -> throw500 "expecting string for name arg of __type"
+ case Map.lookup (G.NamedType (G.Name name)) tyMap of
+ Nothing -> return J.Null
+ Just tyInfo -> J.Object <$> namedTypeR' fld tyInfo
+ where
+ args = _fArguments fld
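The `TypeKind` serialisation near the top of this module leans on a derive-`Show`-then-drop-the-prefix trick, so `TKINPUT_OBJECT` comes out as the spec's `INPUT_OBJECT`. A quick standalone check:

```haskell
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.Text as T

data TypeKind = TKSCALAR | TKOBJECT | TKENUM | TKINPUT_OBJECT | TKLIST | TKNON_NULL
  deriving (Show)

instance J.ToJSON TypeKind where
  toJSON = J.toJSON . T.pack . drop 2 . show

main :: IO ()
main = BLC.putStrLn $ J.encode [TKSCALAR, TKINPUT_OBJECT, TKNON_NULL]
-- prints: ["SCALAR","INPUT_OBJECT","NON_NULL"]
```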
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs
new file mode 100644
index 00000000..e6ebe177
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Mutation.hs
@@ -0,0 +1,114 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Resolve.Mutation
+ ( convertUpdate
+ , convertInsert
+ , convertDelete
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import qualified Hasura.RQL.DML.Delete as RD
+import qualified Hasura.RQL.DML.Insert as RI
+import qualified Hasura.RQL.DML.Returning as RR
+import qualified Hasura.RQL.DML.Update as RU
+
+import qualified Hasura.SQL.DML as S
+
+import Hasura.GraphQL.Resolve.BoolExp
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.InputValue
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
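+-- evaluates the given action for every field in the selection set and
+-- keys the results by each field's response alias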
+withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m (Map.HashMap Text a)
+withSelSet selSet f =
+ fmap (Map.fromList . toList) $ forM selSet $ \fld -> do
+ res <- f fld
+ return (G.unName $ G.unAlias $ _fAlias fld, res)
+
+convertReturning
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => G.NamedType -> SelSet -> m RR.RetFlds
+convertReturning ty selSet =
+ withSelSet selSet $ \fld ->
+ case _fName fld of
+ "__typename" -> return $ RR.RExp $ G.unName $ G.unNamedType ty
+ _ -> do
+ PGColInfo col colTy <- getPGColInfo ty $ _fName fld
+ return $ RR.RCol (col, colTy)
+
+convertMutResp
+ :: (MonadError QErr m, MonadReader r m, Has FieldMap r)
+ => G.NamedType -> SelSet -> m RR.MutFlds
+convertMutResp ty selSet =
+ withSelSet selSet $ \fld ->
+ case _fName fld of
+ "__typename" -> return $ RR.MExp $ G.unName $ G.unNamedType ty
+ "affected_rows" -> return RR.MCount
+ _ -> fmap RR.MRet $ convertReturning (_fType fld) $ _fSelSet fld
+
+convertRowObj
+ :: (MonadError QErr m, MonadState PrepArgs m)
+ => AnnGValue
+ -> m [(PGCol, S.SQLExp)]
+convertRowObj val =
+ flip withObject val $ \_ obj -> forM (Map.toList obj) $ \(k, v) -> do
+ prepExp <- asPGColVal v >>= prepare
+ return (PGCol $ G.unName k, prepExp)
+
+-- TODO: add conflict clause
+convertInsert
+ :: (QualifiedTable, QualifiedTable) -- table, view
+ -> [PGCol] -- all the columns in this table
+ -> Field -- the mutation field
+ -> Convert RespTx
+convertInsert (tn, vn) tableCols fld = do
+ rows <- withArg (_fArguments fld) "objects" asRowExps
+ mutFlds <- convertMutResp (_fType fld) $ _fSelSet fld
+ args <- get
+ let p1 = RI.InsertQueryP1 tn vn tableCols rows Nothing mutFlds
+ return $ RI.insertP2 (p1, args)
+ where
+ asRowExps = withArray (const $ mapM rowExpWithDefaults)
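+    -- every column receives a value: columns present in the input
+    -- object keep their given value (Map.union is left-biased), the
+    -- rest fall back to the SQL keyword DEFAULT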
+ rowExpWithDefaults val = do
+ givenCols <- convertRowObj val
+ return $ Map.elems $ Map.union (Map.fromList givenCols) defVals
+ defVals = Map.fromList $ zip tableCols (repeat $ S.SEUnsafe "DEFAULT")
+
+convertUpdate
+ :: QualifiedTable -- table
+ -> S.BoolExp -- the filter expression
+ -> Field -- the mutation field
+ -> Convert RespTx
+convertUpdate tn filterExp fld = do
+  -- a set expression is the same as a row object
+ setExp <- withArg args "_set" convertRowObj
+ whereExp <- withArg args "where" $ convertBoolExp tn
+ mutFlds <- convertMutResp (_fType fld) $ _fSelSet fld
+ prepArgs <- get
+ let p1 = RU.UpdateQueryP1 tn setExp (filterExp, whereExp) mutFlds
+ return $ RU.updateP2 (p1, prepArgs)
+ where
+ args = _fArguments fld
+
+convertDelete
+ :: QualifiedTable -- table
+ -> S.BoolExp -- the filter expression
+ -> Field -- the mutation field
+ -> Convert RespTx
+convertDelete tn filterExp fld = do
+ whereExp <- withArg (_fArguments fld) "where" $ convertBoolExp tn
+ mutFlds <- convertMutResp (_fType fld) $ _fSelSet fld
+ args <- get
+ let p1 = RD.DeleteQueryP1 tn (filterExp, whereExp) mutFlds
+ return $ RD.deleteP2 (p1, args)
diff --git a/server/src-lib/Hasura/GraphQL/Resolve/Select.hs b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs
new file mode 100644
index 00000000..a632520d
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Resolve/Select.hs
@@ -0,0 +1,107 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Resolve.Select
+ ( convertSelect
+ ) where
+
+import Data.Has
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import qualified Hasura.RQL.DML.Select as RS
+
+import qualified Hasura.SQL.DML as S
+
+import Hasura.GraphQL.Resolve.BoolExp
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Resolve.InputValue
+import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+fromSelSet
+ :: G.NamedType
+ -> SelSet
+ -> Convert (Map.HashMap FieldName RS.AnnFld)
+fromSelSet fldTy flds =
+ fmap Map.fromList $ forM (toList flds) $ \fld -> do
+ let fldName = _fName fld
+ let rqlFldName = FieldName $ G.unName $ G.unAlias $ _fAlias fld
+ case fldName of
+ "__typename" -> return (rqlFldName, RS.FExp $ G.unName $ G.unNamedType fldTy)
+ _ -> do
+ fldInfo <- getFldInfo fldTy fldName
+ case fldInfo of
+ Left (PGColInfo pgCol colTy) -> return (rqlFldName, RS.FCol (pgCol, colTy))
+ Right (relInfo, tableFilter) -> do
+ let relTN = riRTable relInfo
+ relSelData <- fromField relTN tableFilter fld
+ let annRel = RS.AnnRel (riName relInfo) (riType relInfo)
+ (riMapping relInfo) relSelData
+ return (rqlFldName, RS.FRel annRel)
+
+fromField
+ :: QualifiedTable -> S.BoolExp -> Field -> Convert RS.SelectData
+fromField tn permFilter fld = do
+ whereExpM <- withArgM args "where" $ convertBoolExp tn
+ ordByExpM <- withArgM args "order_by" parseOrderBy
+ limitExpM <- withArgM args "limit" $ asPGColVal >=> prepare
+ offsetExpM <- withArgM args "offset" $ asPGColVal >=> prepare
+ annFlds <- fromSelSet (_fType fld) $ _fSelSet fld
+ return $ RS.SelectData annFlds tn (permFilter, whereExpM) ordByExpM
+ [] limitExpM offsetExpM
+ where
+ args = _fArguments fld
+
+getEnumInfo
+ :: ( MonadError QErr m
+ , MonadReader r m
+ , Has OrdByResolveCtx r
+ )
+ => G.NamedType -> G.EnumValue -> m OrdByResolveCtxElem
+getEnumInfo nt v = do
+ -- fldMap <- _gcFieldMap <$> ask
+ ordByCtx <- asks getter
+ onNothing (Map.lookup (nt,v) ordByCtx) $
+ throw500 $ "could not lookup " <> showName (G.unEnumValue v) <> " in " <>
+ showNamedTy nt
+
+parseOrderBy
+ :: (MonadError QErr m
+ , MonadReader r m
+ , Has OrdByResolveCtx r
+ )
+ => AnnGValue -> m S.OrderByExp
+parseOrderBy v = do
+ enums <- withArray (const $ mapM asEnumVal) v
+ fmap S.OrderByExp $ forM enums $ \(nt, ev) ->
+ convOrdByElem <$> getEnumInfo nt ev
+ where
+ convOrdByElem (PGColInfo col _, ordTy, nullsOrd) =
+ S.OrderByItem (Left col)
+ (Just $ convOrdTy ordTy)
+ (Just $ convNullsOrd nullsOrd)
+
+ convOrdTy = \case
+ OAsc -> S.OTAsc
+ ODesc -> S.OTDesc
+
+ convNullsOrd = \case
+ NFirst -> S.NFirst
+ NLast -> S.NLast
+
+convertSelect
+ :: QualifiedTable -> S.BoolExp -> Field -> Convert RespTx
+convertSelect qt permFilter fld = do
+ selData <- fromField qt permFilter fld
+ prepArgs <- get
+ return $ RS.selectP2 (selData, prepArgs)
diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs
new file mode 100644
index 00000000..644cc4fa
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Schema.hs
@@ -0,0 +1,756 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TupleSections #-}
+
+module Hasura.GraphQL.Schema
+ ( mkGCtxMap
+ , GCtxMap
+ , getGCtx
+ , GCtx(..)
+ , OpCtx(..)
+ , OrdByResolveCtx
+ , OrdByResolveCtxElem
+ , NullsOrder(..)
+ , OrdTy(..)
+ ) where
+
+import Data.Has
+
+import qualified Data.HashMap.Strict as Map
+import qualified Data.HashSet as Set
+
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Resolve.Context
+import Hasura.GraphQL.Validate.Types
+
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Hasura.SQL.DML as S
+
+defaultTypes :: [TypeInfo]
+defaultTypes = $(fromSchemaDocQ defaultSchema)
+
+type OpCtxMap = Map.HashMap G.Name OpCtx
+
+data OpCtx
+ -- tn, vn, cols, req hdrs
+ = OCInsert QualifiedTable QualifiedTable [PGCol] [T.Text]
+ -- tn, filter exp, req hdrs
+ | OCSelect QualifiedTable S.BoolExp [T.Text]
+ -- tn, filter exp, req hdrs
+ | OCUpdate QualifiedTable S.BoolExp [T.Text]
+ -- tn, filter exp, req hdrs
+ | OCDelete QualifiedTable S.BoolExp [T.Text]
+ deriving (Show, Eq)
+
+data GCtx
+ = GCtx
+ { _gTypes :: !TypeMap
+ , _gFields :: !FieldMap
+ , _gOrdByEnums :: !OrdByResolveCtx
+ , _gQueryRoot :: !ObjTyInfo
+ , _gMutRoot :: !(Maybe ObjTyInfo)
+ , _gOpCtxMap :: !OpCtxMap
+ } deriving (Show, Eq)
+
+instance Has TypeMap GCtx where
+ getter = _gTypes
+ modifier f ctx = ctx { _gTypes = f $ _gTypes ctx }
+
+data TyAgg
+ = TyAgg
+ { _taTypes :: !TypeMap
+ , _taFields :: !FieldMap
+ , _taOrdByEnums :: !OrdByResolveCtx
+ } deriving (Show, Eq)
+
+instance Semigroup TyAgg where
+ (TyAgg t1 f1 o1) <> (TyAgg t2 f2 o2) =
+ TyAgg (Map.union t1 t2) (Map.union f1 f2) (Map.union o1 o2)
+
+instance Monoid TyAgg where
+ mempty = TyAgg Map.empty Map.empty Map.empty
+ mappend = (<>)
+
+type SelField = Either PGColInfo (RelInfo, S.BoolExp)
+
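+-- e.g. the table "public"."author" maps to the GraphQL name "author",
+-- while "blog"."author" maps to "blog_author"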
+qualTableToName :: QualifiedTable -> G.Name
+qualTableToName = G.Name <$> \case
+ QualifiedTable (SchemaName "public") tn -> getTableTxt tn
+ QualifiedTable sn tn -> getSchemaTxt sn <> "_" <> getTableTxt tn
+
+mkCompExpName :: PGColType -> G.Name
+mkCompExpName pgColTy =
+ G.Name $ T.pack (show pgColTy) <> "_comparison_exp"
+
+mkCompExpTy :: PGColType -> G.NamedType
+mkCompExpTy =
+ G.NamedType . mkCompExpName
+
+mkBoolExpName :: QualifiedTable -> G.Name
+mkBoolExpName tn =
+ qualTableToName tn <> "_bool_exp"
+
+mkBoolExpTy :: QualifiedTable -> G.NamedType
+mkBoolExpTy =
+ G.NamedType . mkBoolExpName
+
+mkTableTy :: QualifiedTable -> G.NamedType
+mkTableTy =
+ G.NamedType . qualTableToName
+
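+{-
+
+comparison expression for a column of some type 'coltype'
+(string operators are added only for text/varchar columns):
+
+input coltype_comparison_exp {
+  _eq: coltype
+  _neq: coltype
+  _gt: coltype
+  _lt: coltype
+  _gte: coltype
+  _lte: coltype
+  _in: [coltype]
+  _nin: [coltype]
+  _like: String
+  .
+  .
+}
+
+-}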
+mkCompExpInp :: PGColType -> InpObjTyInfo
+mkCompExpInp colTy =
+ InpObjTyInfo (Just tyDesc) (mkCompExpTy colTy) $ fromInpValL $ concat
+ [ map (mk colScalarTy) typedOps
+ , map (mk $ G.toLT colScalarTy) listOps
+ , bool [] (map (mk $ mkScalarTy PGText) stringOps) isStringTy
+ ]
+ where
+ tyDesc = mconcat
+ [ "expression to compare columns of type "
+ , G.Description (T.pack $ show colTy)
+ , ". All fields are combined with logical 'AND'."
+ ]
+
+ isStringTy = case colTy of
+ PGVarchar -> True
+ PGText -> True
+ _ -> False
+
+ mk t n = InpValInfo Nothing n $ G.toGT t
+
+ colScalarTy = mkScalarTy colTy
+ -- colScalarListTy = GA.GTList colGTy
+
+ typedOps =
+ ["_eq", "_neq", "_gt", "_lt", "_gte", "_lte"]
+
+ listOps =
+ [ "_in", "_nin" ]
+
+ -- TODO
+ -- columnOps =
+ -- [ "_ceq", "_cneq", "_cgt", "_clt", "_cgte", "_clte"]
+
+ stringOps =
+ [ "_like", "_nlike", "_ilike", "_nilike"
+ , "_similar", "_nsimilar"
+ ]
+
+mkPGColFld :: PGColInfo -> ObjFldInfo
+mkPGColFld (PGColInfo colName colTy) =
+ ObjFldInfo Nothing n Map.empty ty
+ where
+ n = G.Name $ getPGColTxt colName
+ ty = G.toGT $ mkScalarTy colTy
+
+-- where: table_bool_exp
+-- limit: Int
+-- offset: Int
+-- order_by: [table_order_by!]
+mkSelArgs :: QualifiedTable -> [InpValInfo]
+mkSelArgs tn =
+ [ InpValInfo (Just whereDesc) "where" $ G.toGT $ mkBoolExpTy tn
+ , InpValInfo (Just limitDesc) "limit" $ G.toGT $ mkScalarTy PGInteger
+ , InpValInfo (Just offsetDesc) "offset" $ G.toGT $ mkScalarTy PGInteger
+ , InpValInfo (Just orderByDesc) "order_by" $ G.toGT $ G.toLT $ G.toNT $
+ mkOrdByTy tn
+ ]
+ where
+ whereDesc = "filter the rows returned"
+    limitDesc = "limit the number of rows returned"
+ offsetDesc = "skip the first n rows. Use only with order_by"
+ orderByDesc = "sort the rows by one or more columns"
+
+fromInpValL :: [InpValInfo] -> Map.HashMap G.Name InpValInfo
+fromInpValL = mapFromL _iviName
+
+{-
+
+array_relationship(
+ where: remote_table_bool_exp
+ limit: Int
+ offset: Int
+): [remote_table!]!
+object_relationship: remote_table
+
+-}
+mkRelFld :: RelInfo -> ObjFldInfo
+mkRelFld (RelInfo rn rTy _ remTab _) = case rTy of
+ ArrRel ->
+ ObjFldInfo (Just "An array relationship") (G.Name $ getRelTxt rn)
+ (fromInpValL $ mkSelArgs remTab)
+ (G.toGT $ G.toNT $ G.toLT $ G.toNT relTabTy)
+ ObjRel ->
+ ObjFldInfo (Just "An object relationship") (G.Name $ getRelTxt rn)
+ Map.empty
+ (G.toGT relTabTy)
+ where
+ relTabTy = mkTableTy remTab
+
+{-
+type table {
+ col1: colty1
+ .
+ .
+ rel1: relty1
+}
+-}
+mkTableObj
+ :: QualifiedTable
+ -> [SelField]
+ -> ObjTyInfo
+mkTableObj tn allowedFlds =
+ mkObjTyInfo (Just desc) (mkTableTy tn) $ mapFromL _fiName flds
+ where
+ flds = map (either mkPGColFld (mkRelFld . fst)) allowedFlds
+ desc = G.Description $
+ "columns and relationships of " <>> tn
+
+{-
+
+table(
+ where: table_bool_exp
+ limit: Int
+ offset: Int
+): [table!]!
+
+-}
+mkSelFld
+ :: QualifiedTable
+ -> ObjFldInfo
+mkSelFld tn =
+ ObjFldInfo (Just desc) fldName args ty
+ where
+ desc = G.Description $ "fetch data from the table: " <>> tn
+ fldName = qualTableToName tn
+ args = fromInpValL $ mkSelArgs tn
+ ty = G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableTy tn
+
+-- table_mutation_response
+mkMutRespTy :: QualifiedTable -> G.NamedType
+mkMutRespTy tn =
+ G.NamedType $ qualTableToName tn <> "_mutation_response"
+
+{-
+type table_mutation_response {
+ affected_rows: Int!
+ returning: [table_no_rels!]!
+}
+-}
+mkMutRespObj
+ :: QualifiedTable
+ -> ObjTyInfo
+mkMutRespObj tn =
+ mkObjTyInfo (Just objDesc) (mkMutRespTy tn) $ mapFromL _fiName
+ [affectedRowsFld, returningFld]
+ where
+ objDesc = G.Description $
+ "response of any mutation on the table " <>> tn
+ affectedRowsFld =
+ ObjFldInfo (Just desc) "affected_rows" Map.empty $
+ G.toGT $ G.toNT $ mkScalarTy PGInteger
+ where
+ desc = "number of affected rows by the mutation"
+ returningFld =
+ ObjFldInfo (Just desc) "returning" Map.empty $
+ G.toGT $ G.toNT $ G.toLT $ G.toNT $ mkTableNoRelsTy tn
+ where
+ desc = "data of the affected rows by the mutation"
+
+-- table_no_rels
+mkTableNoRelsTy :: QualifiedTable -> G.NamedType
+mkTableNoRelsTy tn =
+ G.NamedType $ qualTableToName tn <> "_no_rels"
+
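+{-
+type table_no_rels {
+  col1: colty1
+  .
+  .
+  coln: coltyn
+}
+-}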
+mkTableNoRelsObj
+ :: QualifiedTable
+ -> [SelField]
+ -> ObjTyInfo
+mkTableNoRelsObj tn fields =
+ mkObjTyInfo (Just desc) (mkTableNoRelsTy tn) $ mapFromL _fiName pgCols
+ where
+ pgCols = map mkPGColFld $ lefts fields
+ desc = G.Description $
+ "only postgres columns (no relationships) from " <>> tn
+
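+{-
+
+input table_bool_exp {
+  _not: table_bool_exp
+  _and: [table_bool_exp]
+  _or: [table_bool_exp]
+  col1: coltype1_comparison_exp
+  .
+  .
+  rel1: remote_table_bool_exp
+}
+
+-}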
+mkBoolExpInp
+ :: QualifiedTable
+ -- the fields that are allowed
+ -> [SelField]
+ -> InpObjTyInfo
+mkBoolExpInp tn fields =
+ InpObjTyInfo (Just desc) boolExpTy $ Map.fromList
+ [(_iviName inpVal, inpVal) | inpVal <- inpValues]
+ where
+ desc = G.Description $
+ "Boolean expression to filter rows from the table " <> tn <<>
+ ". All fields are combined with a logical 'AND'."
+
+ -- the type of this boolean expression
+ boolExpTy = mkBoolExpTy tn
+
+ -- all the fields of this input object
+ inpValues = combinators <> map mkFldExpInp fields
+
+ mk n ty = InpValInfo Nothing n $ G.toGT ty
+
+ boolExpListTy = G.toLT boolExpTy
+
+ combinators =
+ [ mk "_not" boolExpTy
+ , mk "_and" boolExpListTy
+ , mk "_or" boolExpListTy
+ ]
+
+ mkFldExpInp = \case
+ Left (PGColInfo colName colTy) ->
+ mk (G.Name $ getPGColTxt colName) (mkCompExpTy colTy)
+ Right (RelInfo relName _ _ remTab _, _) ->
+ mk (G.Name $ getRelTxt relName) (mkBoolExpTy remTab)
+
+mkPGColInp :: PGColInfo -> InpValInfo
+mkPGColInp (PGColInfo colName colTy) =
+ InpValInfo Nothing (G.Name $ getPGColTxt colName) $
+ G.toGT $ mkScalarTy colTy
+
+-- table_set_input
+mkUpdSetTy :: QualifiedTable -> G.NamedType
+mkUpdSetTy tn =
+ G.NamedType $ qualTableToName tn <> "_set_input"
+
+{-
+input table_set_input {
+ col1: colty1
+ .
+ .
+ coln: coltyn
+}
+-}
+mkUpdInp
+ :: QualifiedTable -> [PGColInfo] -> InpObjTyInfo
+mkUpdInp tn cols =
+ InpObjTyInfo (Just desc) (mkUpdSetTy tn) $ fromInpValL $
+ map mkPGColInp cols
+ where
+ desc = G.Description $
+ "input type for updating data in table " <>> tn
+
+{-
+
+update_table(
+ where : table_bool_exp!
+ _set : table_set_input!
+): table_mutation_response
+
+-}
+
+mkUpdMutFld
+ :: QualifiedTable -> ObjFldInfo
+mkUpdMutFld tn =
+ ObjFldInfo (Just desc) fldName (fromInpValL [filterArg, setArg]) $
+ G.toGT $ mkMutRespTy tn
+ where
+ desc = G.Description $ "update data of the table: " <>> tn
+
+ fldName = "update_" <> qualTableToName tn
+
+ filterArgDesc = "filter the rows which have to be updated"
+ filterArg =
+ InpValInfo (Just filterArgDesc) "where" $ G.toGT $
+ G.toNT $ mkBoolExpTy tn
+
+ setArgDesc = "sets the columns of the filtered rows to the given values"
+ setArg =
+ InpValInfo (Just setArgDesc) "_set" $ G.toGT $
+ G.toNT $ mkUpdSetTy tn
+
+{-
+
+delete_table(
+ where : table_bool_exp!
+): table_mutation_response
+
+-}
+
+mkDelMutFld
+ :: QualifiedTable -> ObjFldInfo
+mkDelMutFld tn =
+ ObjFldInfo (Just desc) fldName (fromInpValL [filterArg]) $
+ G.toGT $ mkMutRespTy tn
+ where
+ desc = G.Description $ "delete data from the table: " <>> tn
+
+ fldName = "delete_" <> qualTableToName tn
+
+ filterArgDesc = "filter the rows which have to be deleted"
+ filterArg =
+ InpValInfo (Just filterArgDesc) "where" $ G.toGT $
+ G.toNT $ mkBoolExpTy tn
+
+-- table_insert_input
+mkInsInpTy :: QualifiedTable -> G.NamedType
+mkInsInpTy tn =
+ G.NamedType $ qualTableToName tn <> "_insert_input"
+
+{-
+
+input table_insert_input {
+ col1: colty1
+ .
+ .
+ coln: coltyn
+}
+
+-}
+
+mkInsInp
+ :: QualifiedTable -> [PGColInfo] -> InpObjTyInfo
+mkInsInp tn cols =
+ InpObjTyInfo (Just desc) (mkInsInpTy tn) $ fromInpValL $
+ map mkPGColInp cols
+ where
+ desc = G.Description $
+ "input type for inserting data into table " <>> tn
+
+-- insert_table(objects: [table_insert_input!]!): table_mutation_response
+mkInsMutFld
+ :: QualifiedTable -> ObjFldInfo
+mkInsMutFld tn =
+ ObjFldInfo (Just desc) fldName (fromInpValL [objectsArg]) $
+ G.toGT $ mkMutRespTy tn
+ where
+ desc = G.Description $
+ "insert data into the table: " <>> tn
+
+ fldName = "insert_" <> qualTableToName tn
+
+ objsArgDesc = "the rows to be inserted"
+ objectsArg =
+ InpValInfo (Just objsArgDesc) "objects" $ G.toGT $
+ G.toNT $ G.toLT $ G.toNT $ mkInsInpTy tn
+
+mkOrdByTy :: QualifiedTable -> G.NamedType
+mkOrdByTy tn =
+ G.NamedType $ qualTableToName tn <> "_order_by"
+
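+{-
+
+enum table_order_by {
+  col1_asc
+  col1_desc
+  col1_asc_nulls_first
+  col1_desc_nulls_first
+  .
+  .
+}
+
+-}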
+mkOrdByCtx
+ :: QualifiedTable -> [PGColInfo] -> (EnumTyInfo, OrdByResolveCtx)
+mkOrdByCtx tn cols =
+ (enumTyInfo, resolveCtx)
+ where
+ enumTyInfo = EnumTyInfo (Just desc) enumTy $
+ mapFromL _eviVal $ map toEnumValInfo enumValsInt
+ enumTy = mkOrdByTy tn
+ desc = G.Description $
+ "ordering options when selecting data from " <>> tn
+
+ toEnumValInfo (v, enumValDesc, _) =
+ EnumValInfo (Just $ G.Description enumValDesc) (G.EnumValue v) False
+
+ resolveCtx = Map.fromList $ map toResolveCtxPair enumValsInt
+
+ toResolveCtxPair (v, _, ctx) = ((enumTy, G.EnumValue v), ctx)
+
+ enumValsInt = concatMap mkOrdByEnumsOfCol cols
+
+mkOrdByEnumsOfCol
+ :: PGColInfo
+ -> [(G.Name, Text, (PGColInfo, OrdTy, NullsOrder))]
+mkOrdByEnumsOfCol colInfo@(PGColInfo col _) =
+ [ ( colN <> "_asc"
+ , "in the ascending order of " <> col <<> ", nulls last"
+ , (colInfo, OAsc, NLast)
+ )
+ , ( colN <> "_desc"
+ , "in the descending order of " <> col <<> ", nulls last"
+ , (colInfo, ODesc, NLast)
+ )
+ , ( colN <> "_asc_nulls_first"
+ , "in the ascending order of " <> col <<> ", nulls first"
+ , (colInfo, OAsc, NFirst)
+ )
+ , ( colN <> "_desc_nulls_first"
+ , "in the descending order of " <> col <<> ", nulls first"
+      , (colInfo, ODesc, NFirst)
+ )
+ ]
+ where
+ colN = pgColToFld col
+ pgColToFld = G.Name . getPGColTxt
+
+data RootFlds
+ = RootFlds
+ { _taMutation :: !(Map.HashMap G.Name (OpCtx, Either ObjFldInfo ObjFldInfo))
+ } deriving (Show, Eq)
+
+instance Semigroup RootFlds where
+ (RootFlds m1) <> (RootFlds m2)
+ = RootFlds (Map.union m1 m2)
+
+instance Monoid RootFlds where
+ mempty = RootFlds Map.empty
+ mappend = (<>)
+
+mkGCtxRole'
+ :: QualifiedTable
+ -- insert cols
+ -> Maybe [PGColInfo]
+ -- select permission
+ -> Maybe [SelField]
+ -- update cols
+ -> Maybe [PGColInfo]
+ -- delete cols
+ -> Maybe ()
+ -> TyAgg
+mkGCtxRole' tn insColsM selFldsM updColsM delPermM =
+ TyAgg (mkTyInfoMap allTypes) fieldMap ordByEnums
+
+ where
+
+ ordByEnums = fromMaybe Map.empty ordByResCtxM
+
+ allTypes = catMaybes
+ [ TIInpObj <$> insInpObjM
+ , TIInpObj <$> updSetInpObjM
+ , TIInpObj <$> boolExpInpObjM
+ , TIObj <$> noRelsObjM
+ , TIObj <$> mutRespObjM
+ , TIObj <$> selObjM
+ , TIEnum <$> ordByTyInfoM
+ ]
+
+ fieldMap = Map.unions $ catMaybes
+ [ insInpObjFldsM, updSetInpObjFldsM, boolExpInpObjFldsM
+ , noRelsObjFldsM, selObjFldsM
+ ]
+
+ nameFromSelFld = \case
+ Left colInfo -> G.Name $ getPGColTxt $ pgiName colInfo
+ Right (relInfo, _) -> G.Name $ getRelTxt $ riName relInfo
+
+ -- helper
+ mkColFldMap ty = mapFromL ((ty,) . nameFromSelFld) . map Left
+
+ -- insert input type
+ insInpObjM = mkInsInp tn <$> insColsM
+ -- fields used in insert input object
+ insInpObjFldsM = mkColFldMap (mkInsInpTy tn) <$> insColsM
+
+ -- update set input type
+ updSetInpObjM = mkUpdInp tn <$> updColsM
+ -- fields used in set input object
+ updSetInpObjFldsM = mkColFldMap (mkUpdSetTy tn) <$> updColsM
+
+ -- boolexp input type
+ boolExpInpObjM = case selFldsM of
+ Just selFlds -> Just $ mkBoolExpInp tn selFlds
+ -- no select permission
+ Nothing ->
+ -- but update/delete is defined
+ if isJust updColsM || isJust delPermM
+ then Just $ mkBoolExpInp tn []
+ else Nothing
+
+ -- helper
+ mkFldMap ty = mapFromL ((ty,) . nameFromSelFld)
+ -- the fields used in bool exp
+ boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
+
+ -- no rels obj
+ noRelsObjM =
+ if isJust insColsM || isJust updColsM || isJust delPermM
+ then Just $ mkTableNoRelsObj tn $ fromMaybe [] selFldsM
+ else Nothing
+ -- the fields used in returning object
+ noRelsObjFldsM = const (
+ mkColFldMap (mkTableNoRelsTy tn) $ lefts $ fromMaybe [] selFldsM
+ ) <$> noRelsObjM
+
+ -- mut resp obj (only when noRelsObjM is needed)
+ mutRespObjM = const (mkMutRespObj tn) <$> noRelsObjM
+
+ -- table obj
+ selObjM = mkTableObj tn <$> selFldsM
+ -- the fields used in table object
+ selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM
+
+ ordByEnumsCtxM = mkOrdByCtx tn . lefts <$> selFldsM
+
+ (ordByTyInfoM, ordByResCtxM) = case ordByEnumsCtxM of
+ (Just (a, b)) -> (Just a, Just b)
+ Nothing -> (Nothing, Nothing)
+
+getRootFldsRole'
+ :: QualifiedTable
+ -> FieldInfoMap
+ -> Maybe (QualifiedTable, [T.Text]) -- insert view
+ -> Maybe (S.BoolExp, [T.Text]) -- select filter
+ -> Maybe (S.BoolExp, [T.Text]) -- update filter
+ -> Maybe (S.BoolExp, [T.Text]) -- delete filter
+ -> RootFlds
+getRootFldsRole' tn fields insM selM updM delM =
+ RootFlds mFlds
+ where
+ mFlds = mapFromL (either _fiName _fiName . snd) $ catMaybes
+ [ getInsDet <$> insM, getSelDet <$> selM
+ , getUpdDet <$> updM, getDelDet <$> delM]
+ colInfos = fst $ partitionFieldInfos $ Map.elems fields
+ getInsDet (vn, hdrs) =
+ (OCInsert tn vn (map pgiName colInfos) hdrs, Right $ mkInsMutFld tn)
+ getUpdDet (updFltr, hdrs) =
+ (OCUpdate tn updFltr hdrs, Right $ mkUpdMutFld tn)
+ getDelDet (delFltr, hdrs) =
+ (OCDelete tn delFltr hdrs, Right $ mkDelMutFld tn)
+ getSelDet (selFltr, hdrs) =
+ (OCSelect tn selFltr hdrs, Left $ mkSelFld tn)
+
+-- getRootFlds
+-- :: TableCache
+-- -> Map.HashMap RoleName RootFlds
+-- getRootFlds tables =
+-- foldr (Map.unionWith mappend . getRootFldsTable) Map.empty $
+-- Map.elems tables
+
+-- gets all the selectable fields (cols and rels) of a
+-- table for a role
+getSelFlds
+ :: (MonadError QErr m)
+ => TableCache
+ -- all the fields of a table
+ -> FieldInfoMap
+ -- role and its permission
+ -> RoleName -> SelPermInfo
+ -> m [SelField]
+getSelFlds tableCache fields role selPermInfo =
+ fmap catMaybes $ forM (Map.elems fields) $ \case
+ FIColumn pgColInfo ->
+ return $ fmap Left $ bool Nothing (Just pgColInfo) $
+ Set.member (pgiName pgColInfo) allowedCols
+ FIRelationship relInfo -> do
+ remTableInfo <- getTabInfo $ riRTable relInfo
+ let remTableSelPermM =
+ Map.lookup role (tiRolePermInfoMap remTableInfo) >>= _permSel
+ return $ fmap (Right . (relInfo,) . spiFilter) remTableSelPermM
+ where
+ allowedCols = spiCols selPermInfo
+ getTabInfo tn =
+ onNothing (Map.lookup tn tableCache) $
+ throw500 $ "remote table not found: " <>> tn
+
+mkGCtxRole
+ :: (MonadError QErr m)
+ => TableCache
+ -> QualifiedTable
+ -> FieldInfoMap
+ -> RoleName
+ -> RolePermInfo
+ -> m (TyAgg, RootFlds)
+mkGCtxRole tableCache tn fields role permInfo = do
+ selFldsM <- mapM (getSelFlds tableCache fields role) $ _permSel permInfo
+ let insColsM = const colInfos <$> _permIns permInfo
+ updColsM = filterColInfos . upiCols <$> _permUpd permInfo
+ tyAgg = mkGCtxRole' tn insColsM selFldsM updColsM
+ (void $ _permDel permInfo)
+ rootFlds = getRootFldsRole tn fields permInfo
+ return (tyAgg, rootFlds)
+ where
+ colInfos = fst $ partitionFieldInfos $ Map.elems fields
+ filterColInfos allowedSet =
+ filter ((`Set.member` allowedSet) . pgiName) colInfos
+
+getRootFldsRole
+ :: QualifiedTable
+ -> FieldInfoMap
+ -> RolePermInfo
+ -> RootFlds
+getRootFldsRole tn fields (RolePermInfo insM selM updM delM) =
+ getRootFldsRole' tn fields
+ (mkIns <$> insM) (mkSel <$> selM)
+ (mkUpd <$> updM) (mkDel <$> delM)
+ where
+ mkIns i = (ipiView i, ipiRequiredHeaders i)
+ mkSel s = (spiFilter s, spiRequiredHeaders s)
+ mkUpd u = (upiFilter u, upiRequiredHeaders u)
+ mkDel d = (dpiFilter d, dpiRequiredHeaders d)
+
+mkGCtxMapTable
+ :: (MonadError QErr m)
+ => TableCache
+ -> TableInfo
+ -> m (Map.HashMap RoleName (TyAgg, RootFlds))
+mkGCtxMapTable tableCache (TableInfo tn fields rolePerms) = do
+ m <- Map.traverseWithKey (mkGCtxRole tableCache tn fields) rolePerms
+ let adminCtx = mkGCtxRole' tn (Just colInfos)
+ (Just selFlds) (Just colInfos) (Just ())
+ return $ Map.insert adminRole (adminCtx, adminRootFlds) m
+ where
+ colInfos = fst $ partitionFieldInfos $ Map.elems fields
+ selFlds = flip map (Map.elems fields) $ \case
+ FIColumn pgColInfo -> Left pgColInfo
+ FIRelationship relInfo -> Right (relInfo, noFilter)
+ noFilter = S.BELit True
+ adminRootFlds =
+ getRootFldsRole' tn fields (Just (tn, [])) (Just (noFilter, []))
+ (Just (noFilter, [])) (Just (noFilter, []))
+
+mkScalarTyInfo :: PGColType -> ScalarTyInfo
+mkScalarTyInfo = ScalarTyInfo Nothing
+
+type GCtxMap = Map.HashMap RoleName GCtx
+
+mkGCtxMap
+ :: (MonadError QErr m)
+ => TableCache -> m (Map.HashMap RoleName GCtx)
+mkGCtxMap tableCache = do
+ typesMapL <- mapM (mkGCtxMapTable tableCache) $ Map.elems tableCache
+ let typesMap = foldr (Map.unionWith mappend) Map.empty typesMapL
+ return $ Map.map (uncurry mkGCtx) typesMap
+
+mkGCtx :: TyAgg -> RootFlds -> GCtx
+mkGCtx (TyAgg tyInfos fldInfos ordByEnums) (RootFlds flds) =
+ let queryRoot = mkObjTyInfo (Just "query root") (G.NamedType "query_root") $
+ mapFromL _fiName (schemaFld:typeFld:qFlds)
+ colTys = Set.toList $ Set.fromList $ map pgiType $
+ lefts $ Map.elems fldInfos
+ scalarTys = map (TIScalar . mkScalarTyInfo) colTys
+ compTys = map (TIInpObj . mkCompExpInp) colTys
+ allTys = Map.union tyInfos $ mkTyInfoMap $
+ catMaybes [Just $ TIObj queryRoot, TIObj <$> mutRootM] <>
+ scalarTys <> compTys <> defaultTypes
+ in GCtx allTys fldInfos ordByEnums queryRoot mutRootM $ Map.map fst flds
+ where
+
+ mkMutRoot =
+ mkObjTyInfo (Just "mutation root") (G.NamedType "mutation_root") .
+ mapFromL _fiName
+
+ mutRootM = bool (Just $ mkMutRoot mFlds) Nothing $ null mFlds
+
+ (qFlds, mFlds) = partitionEithers $ map snd $ Map.elems flds
+
+ schemaFld = ObjFldInfo Nothing "__schema" Map.empty $ G.toGT $
+ G.toNT $ G.NamedType "__Schema"
+
+ typeFld = ObjFldInfo Nothing "__type" typeFldArgs $ G.toGT $
+ G.NamedType "__Type"
+ where
+ typeFldArgs = mapFromL _iviName [
+ InpValInfo (Just "name of the type") "name"
+ $ G.toGT $ G.toNT $ G.NamedType "String"
+ ]
+
+getGCtx :: RoleName -> Map.HashMap RoleName GCtx -> GCtx
+getGCtx rn =
+ fromMaybe (mkGCtx mempty mempty) . Map.lookup rn
diff --git a/server/src-lib/Hasura/GraphQL/Utils.hs b/server/src-lib/Hasura/GraphQL/Utils.hs
new file mode 100644
index 00000000..20503e30
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Utils.hs
@@ -0,0 +1,89 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Utils
+ ( onNothing
+ , showName
+ , showNamedTy
+ , throwVE
+ , getBaseTy
+ , mapFromL
+ , groupTuples
+ , groupListWith
+ , mkMapWith
+ , onLeft
+ , showNames
+ ) where
+
+import Hasura.RQL.Types
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Data.List.NonEmpty as NE
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+
+showName :: G.Name -> Text
+showName name = "\"" <> G.unName name <> "\""
+
+onNothing :: (Monad m) => Maybe a -> m a -> m a
+onNothing m act = maybe act return m
+
+throwVE :: (MonadError QErr m) => Text -> m a
+throwVE = throw400 ValidationFailed
+
+showNamedTy :: G.NamedType -> Text
+showNamedTy nt =
+ "'" <> G.showNT nt <> "'"
+
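+-- peels off all list and non-null wrappers to get the underlying
+-- named type, e.g. the base type of '[[author!]]!' is 'author'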
+getBaseTy :: G.GType -> G.NamedType
+getBaseTy = \case
+ G.TypeNamed n -> n
+ G.TypeList lt -> getBaseTyL lt
+ G.TypeNonNull nnt -> getBaseTyNN nnt
+ where
+ getBaseTyL = getBaseTy . G.unListType
+ getBaseTyNN = \case
+ G.NonNullTypeList lt -> getBaseTyL lt
+ G.NonNullTypeNamed n -> n
+
+mapFromL :: (Eq k, Hashable k) => (a -> k) -> [a] -> Map.HashMap k a
+mapFromL f l =
+ Map.fromList [(f v, v) | v <- l]
+
+groupListWith
+ :: (Eq k, Hashable k, Foldable t, Functor t)
+ => (v -> k) -> t v -> Map.HashMap k (NE.NonEmpty v)
+groupListWith f l =
+ groupTuples $ fmap (\v -> (f v, v)) l
+
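+-- e.g. groupTuples [(1,"a"), (2,"b"), (1,"c")] evaluates to
+-- fromList [(1, "a" :| ["c"]), (2, "b" :| [])] (key order may vary)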
+groupTuples
+ :: (Eq k, Hashable k, Foldable t)
+ => t (k, v) -> Map.HashMap k (NE.NonEmpty v)
+groupTuples =
+ foldr groupFlds Map.empty
+ where
+ groupFlds (k, v) m = case Map.lookup k m of
+ Nothing -> Map.insert k (v NE.:| []) m
+ Just s -> Map.insert k (v NE.<| s) m
+
+-- returns the duplicate keys on the Left, if any, otherwise the map
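+-- e.g. mkMapWith fst [("a",1), ("b",2)] is 'Right' the expected map,
+-- while mkMapWith fst [("a",1), ("a",2)] is Left ("a" :| [])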
+mkMapWith
+ :: (Eq k, Hashable k, Foldable t, Functor t)
+ => (v -> k) -> t v -> Either (NE.NonEmpty k) (Map.HashMap k v)
+mkMapWith f l =
+ case NE.nonEmpty dups of
+ Just dupsNE -> Left dupsNE
+ Nothing -> Right $ Map.map NE.head mapG
+ where
+ mapG = groupListWith f l
+ dups = Map.keys $ Map.filter ((> 1) . length) mapG
+
+onLeft :: (Monad m) => Either e a -> (e -> m a) -> m a
+onLeft e f = either f return e
+
+showNames :: (Foldable t) => t G.Name -> Text
+showNames names =
+ T.intercalate ", " $ map G.unName $ toList names
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Context.hs b/server/src-lib/Hasura/GraphQL/Validate/Context.hs
new file mode 100644
index 00000000..30475866
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/Context.hs
@@ -0,0 +1,73 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Validate.Context
+ ( ValidationCtx(..)
+ , getFieldInfo
+ , getInpFieldInfo
+ , getTyInfo
+ , getTyInfoVE
+ , module Hasura.GraphQL.Utils
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as Map
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Data.Has
+import Hasura.GraphQL.Utils
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+
+getFieldInfo
+ :: ( MonadError QErr m)
+ => ObjTyInfo -> G.Name -> m ObjFldInfo
+getFieldInfo oti fldName =
+ onNothing (Map.lookup fldName $ _otiFields oti) $ throwVE $
+ "field " <> showName fldName <>
+ " not found in type: " <> showNamedTy (_otiName oti)
+
+getInpFieldInfo
+ :: ( MonadError QErr m)
+ => InpObjTyInfo -> G.Name -> m G.GType
+getInpFieldInfo tyInfo fldName =
+ fmap _iviType $ onNothing (Map.lookup fldName $ _iotiFields tyInfo) $
+ throwVE $ "field " <> showName fldName <>
+ " not found in type: " <> showNamedTy (_iotiName tyInfo)
+
+data ValidationCtx
+ = ValidationCtx
+ { _vcTypeMap :: !TypeMap
+ -- these are in the scope of the operation
+ , _vcVarVals :: !AnnVarVals
+ -- all the fragments
+ , _vcFragDefMap :: !FragDefMap
+ } deriving (Show, Eq)
+
+instance Has TypeMap ValidationCtx where
+ getter = _vcTypeMap
+ modifier f ctx = ctx { _vcTypeMap = f $ _vcTypeMap ctx }
+
+getTyInfo
+ :: ( MonadReader r m , Has TypeMap r
+ , MonadError QErr m)
+ => G.NamedType
+ -> m TypeInfo
+getTyInfo namedTy = do
+ tyMap <- asks getter
+ onNothing (Map.lookup namedTy tyMap) $
+ throw500 $ "type info not found for: " <> showNamedTy namedTy
+
+getTyInfoVE
+ :: ( MonadReader r m , Has TypeMap r
+ , MonadError QErr m)
+ => G.NamedType
+ -> m TypeInfo
+getTyInfoVE namedTy = do
+ tyMap <- asks getter
+ onNothing (Map.lookup namedTy tyMap) $
+ throwVE $ "no such type exists in the schema: " <> showNamedTy namedTy
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Field.hs b/server/src-lib/Hasura/GraphQL/Validate/Field.hs
new file mode 100644
index 00000000..c7510d37
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/Field.hs
@@ -0,0 +1,315 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Validate.Field
+ ( ArgsMap
+ , Field(..)
+ , SelSet
+ , denormSelSet
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Casing as J
+import qualified Data.Aeson.TH as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.List as L
+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Hasura.GraphQL.NonEmptySeq as NE
+import qualified Hasura.GraphQL.OrderedMap as OMap
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.InputValue
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+
+-- data ScalarInfo
+-- = SIBuiltin !GBuiltin
+-- | SICustom !PGColType
+-- deriving (Show, Eq)
+
+-- data GBuiltin
+-- = GInt
+-- | GFloat
+-- | GBoolean
+-- | GString
+-- deriving (Show, Eq)
+
+data TypedOperation
+ = TypedOperation
+ { _toType :: !G.OperationType
+ , _toName :: !(Maybe G.Name)
+ , _toSelectionSet :: ![Field]
+ } deriving (Show, Eq)
+
+type ArgsMap = Map.HashMap G.Name AnnGValue
+
+type SelSet = Seq.Seq Field
+
+data Field
+ = Field
+ { _fAlias :: !G.Alias
+ , _fName :: !G.Name
+ , _fType :: !G.NamedType
+ , _fArguments :: !ArgsMap
+ , _fSelSet :: !SelSet
+ } deriving (Eq, Show)
+
+$(J.deriveToJSON (J.aesonDrop 2 J.camelCase){J.omitNothingFields=True}
+ ''Field
+ )
+
+-- newtype FieldMapAlias
+-- = FieldMapAlias
+-- { unFieldMapAlias :: Map.HashMap G.Alias (FieldG FieldMapAlias)
+-- } deriving (Show, Eq)
+
+-- newtype FieldMapName
+-- = FieldMapName
+-- { unFieldMapName :: Map.HashMap G.Name (NE.NonEmpty (FieldG FieldMapName))
+-- } deriving (Show, Eq)
+
+-- type Field = FieldG FieldMapAlias
+
+-- type FieldGrouped = FieldG FieldMapName
+
+-- toFieldGrouped :: Field -> FieldGrouped
+-- toFieldGrouped =
+-- fmap groupFields
+-- where
+-- groupFields m =
+-- FieldMapName $ groupTuples $
+-- flip map (Map.elems $ unFieldMapAlias m) $ \fld ->
+-- (_fName fld, toFieldGrouped fld)
+
+data FieldGroupSrc
+ = FGSFragSprd !G.Name
+ | FGSInlnFrag
+ deriving (Show, Eq)
+
+data FieldGroup
+ = FieldGroup
+ { _fgSource :: !FieldGroupSrc
+ , _fgFields :: !(Seq.Seq Field)
+ } deriving (Show, Eq)
+
+-- data GLoc
+-- = GLoc
+-- { _glLine :: !Int
+-- , _glColumn :: !Int
+-- } deriving (Show, Eq)
+
+-- data GErr
+-- = GErr
+-- { _geMessage :: !Text
+-- , _geLocations :: ![GLoc]
+-- } deriving (Show, Eq)
+
+-- throwGE :: (MonadError QErr m) => Text -> m a
+-- throwGE msg = throwError $ QErr msg []
+
+withDirectives
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Directive]
+ -> m a
+ -> m (Maybe a)
+withDirectives dirs act =
+ -- TODO, use the directives
+ Just <$> act
+
+denormSel
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Name] -- visited fragments
+ -> ObjTyInfo -- parent type info
+ -> G.Selection
+ -> m (Maybe (Either Field FieldGroup))
+denormSel visFrags parObjTyInfo sel = case sel of
+ G.SelectionField fld -> do
+ fldInfo <- getFieldInfo parObjTyInfo $ G._fName fld
+ fmap Left <$> denormFld visFrags fldInfo fld
+ G.SelectionFragmentSpread fragSprd ->
+ fmap Right <$> denormFrag visFrags parTy fragSprd
+ G.SelectionInlineFragment inlnFrag ->
+ fmap Right <$> denormInlnFrag visFrags parObjTyInfo inlnFrag
+ where
+ parTy = _otiName parObjTyInfo
+
+processArgs
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => ObjFldInfo
+ -> [G.Argument]
+ -> m (Map.HashMap G.Name AnnGValue)
+processArgs (ObjFldInfo _ fldName fldParams fldTy) argsL = do
+
+ args <- onLeft (mkMapWith G._aName argsL) $ \dups ->
+ throwVE $ "the following arguments are defined more than once: " <>
+ showNames dups
+
+ let requiredParams = Map.filter (G.isNotNull . _iviType) fldParams
+
+ inpArgs <- forM args $ \(G.Argument argName argVal) -> do
+ argTy <- getArgTy argName
+ validateInputValue valueParser argTy argVal
+
+ forM_ requiredParams $ \argDef -> do
+ let param = _iviName argDef
+ onNothing (Map.lookup param inpArgs) $ throwVE $ mconcat
+ [ "the required argument ", showName param
+ , " is missing on field ", showName fldName
+ ]
+
+ return inpArgs
+
+ where
+ getArgTy argName =
+ onNothing (_iviType <$> Map.lookup argName fldParams) $ throwVE $
+ "no such argument " <> showName argName <> " defined on " <>
+ "field " <> showName fldName
+
+denormFld
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Name] -- visited fragments
+ -> ObjFldInfo
+ -> G.Field
+ -> m (Maybe Field)
+denormFld visFrags fldInfo (G.Field aliasM name args dirs selSet) = do
+
+ let fldTy = _fiTy fldInfo
+ fldBaseTy = getBaseTy fldTy
+
+ fldTyInfo <- getTyInfo fldBaseTy
+
+ argMap <- processArgs fldInfo args
+
+ fields <- case (fldTyInfo, selSet) of
+
+ (TIObj _, []) ->
+ throwVE $ "field " <> showName name <> " of type "
+ <> G.showGT fldTy <> " must have a selection of subfields"
+
+ (TIObj fldObjTyInfo, _) ->
+ denormSelSet visFrags fldObjTyInfo selSet
+
+ (TIScalar _, []) -> return Seq.empty
+ (TIEnum _, []) -> return Seq.empty
+
+ (TIInpObj _, _) ->
+ throwVE $ "internal error: unexpected input type for field: "
+ <> showName name
+
+    -- scalar/enum with a non-empty selection set
+ (_, _) ->
+ throwVE $ "field " <> showName name <> " must not have a "
+ <> "selection since type " <> G.showGT fldTy <> " has no subfields"
+
+ withDirectives dirs $ return $
+ Field (fromMaybe (G.Alias name) aliasM) name fldBaseTy argMap fields
+
+denormInlnFrag
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Name] -- visited fragments
+ -> ObjTyInfo -- type information of the field
+ -> G.InlineFragment
+ -> m (Maybe FieldGroup)
+denormInlnFrag visFrags fldTyInfo inlnFrag = do
+ let fldTy = _otiName fldTyInfo
+ let fragTy = fromMaybe fldTy tyM
+ when (fldTy /= fragTy) $
+ throwVE $ "inline fragment is expected on type " <>
+ showNamedTy fldTy <> " but found " <> showNamedTy fragTy
+ withDirectives directives $ fmap (FieldGroup FGSInlnFrag) $
+ denormSelSet visFrags fldTyInfo selSet
+ where
+ G.InlineFragment tyM directives selSet = inlnFrag
+
+denormSelSet
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Name] -- visited fragments
+ -> ObjTyInfo
+ -> G.SelectionSet
+ -> m (Seq.Seq Field)
+denormSelSet visFrags fldTyInfo selSet = do
+ resFlds <- catMaybes <$> mapM (denormSel visFrags fldTyInfo) selSet
+ mergeFields $ foldl' flatten Seq.empty resFlds
+ where
+ flatten s (Left fld) = s Seq.|> fld
+ flatten s (Right (FieldGroup _ flds)) =
+ s Seq.>< flds
+
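+-- merges selections that share a response alias, e.g.
+-- "{ a: author { name }  a: author { id } }" becomes
+-- "{ a: author { name id } }"; fields under the same alias must agree
+-- on both name and arguments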
+mergeFields
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => Seq.Seq Field
+ -> m (Seq.Seq Field)
+mergeFields flds =
+ fmap Seq.fromList $ forM fldGroups $ \fieldGroup -> do
+ newFld <- checkMergeability fieldGroup
+ childFields <- mergeFields $ foldl' (\l f -> l Seq.>< _fSelSet f) Seq.empty $
+ NE.toSeq fieldGroup
+ return $ newFld {_fSelSet = childFields}
+ where
+ fldGroups = OMap.elems $ OMap.groupListWith _fAlias flds
+ -- can a group be merged?
+ checkMergeability fldGroup = do
+ let groupedFlds = toList $ NE.toSeq fldGroup
+ fldNames = L.nub $ map _fName groupedFlds
+ args = L.nub $ map _fArguments groupedFlds
+ fld = NE.head fldGroup
+ fldAl = _fAlias fld
+ when (length fldNames > 1) $
+ throwVE $ "cannot merge different fields under the same alias ("
+ <> showName (G.unAlias fldAl) <> "): "
+ <> showNames fldNames
+ when (length args > 1) $
+ throwVE $ "cannot merge fields with different arguments"
+ <> " under the same alias: "
+ <> showName (G.unAlias fldAl)
+ return fld
+
+onJust :: (Monad m) => Maybe a -> (a -> m ()) -> m ()
+onJust m act = maybe (return ()) act m
+
+denormFrag
+ :: ( MonadReader ValidationCtx m
+ , MonadError QErr m)
+ => [G.Name] -- visited fragments
+ -> G.NamedType -- parent type
+ -> G.FragmentSpread
+ -> m (Maybe FieldGroup)
+denormFrag visFrags parTy (G.FragmentSpread name directives) = do
+
+ -- check for cycles
+ when (name `elem` visFrags) $
+ throwVE $ "cannot spread fragment " <> showName name <> " within itself via "
+ <> T.intercalate "," (map G.unName visFrags)
+
+ (FragDef _ fragTyInfo selSet) <- getFragInfo
+
+ let fragTy = _otiName fragTyInfo
+
+ -- we don't have unions or interfaces so we can get away with equality
+ when (fragTy /= parTy) $
+ throwVE $ "cannot spread fragment " <> showName name <> " defined on " <>
+ showNamedTy fragTy <> " when selecting fields of type " <> showNamedTy parTy
+
+ resFlds <- withPathK "selset" $ denormSelSet (name:visFrags) fragTyInfo selSet
+
+ withPathK "directives" $ withDirectives directives $
+ return $ FieldGroup (FGSFragSprd name) resFlds
+
+ where
+ getFragInfo = do
+ dctx <- ask
+ onNothing (Map.lookup name $ _vcFragDefMap dctx) $
+ throwVE $ "fragment '" <> G.unName name <> "' not found"
diff --git a/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs b/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs
new file mode 100644
index 00000000..780a6514
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/InputValue.hs
@@ -0,0 +1,267 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.GraphQL.Validate.InputValue
+ ( validateInputValue
+ , jsonParser
+ , valueParser
+ , constValueParser
+ ) where
+
+import Data.Scientific (fromFloatDigits)
+import Hasura.Prelude
+
+import Data.Has
+
+import qualified Data.Aeson as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text as T
+import qualified Data.Vector as V
+import qualified Language.GraphQL.Draft.Syntax as G
+
+import Hasura.GraphQL.Utils
+import Hasura.GraphQL.Validate.Context
+import Hasura.GraphQL.Validate.Types
+import Hasura.RQL.Types
+import Hasura.SQL.Value
+
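+-- result of parsing one layer of an input value: 'Nothing' is an
+-- explicit null, 'Left' is a value that was already annotated while
+-- resolving a variable, and 'Right' is a successfully parsed value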
+newtype P a = P { unP :: Maybe (Either AnnGValue a)}
+
+pNull :: (Monad m) => m (P a)
+pNull = return $ P Nothing
+
+pVal :: (Monad m) => a -> m (P a)
+pVal = return . P . Just . Right
+
+resolveVar
+ :: ( MonadError QErr m
+ , MonadReader ValidationCtx m)
+ => G.Variable -> m AnnGValue
+resolveVar var = do
+ varVals <- _vcVarVals <$> ask
+ -- TODO typecheck
+ onNothing (Map.lookup var varVals) $
+ throwVE $ "no such variable defined in the operation: "
+ <> showName (G.unVariable var)
+ where
+ typeCheck expectedTy actualTy = case (expectedTy, actualTy) of
+ -- named types
+ (G.TypeNamed eTy, G.TypeNamed aTy) -> eTy == aTy
+ -- non null type can be expected for a null type
+ (G.TypeNamed eTy, G.TypeNonNull (G.NonNullTypeNamed aTy)) -> eTy == aTy
+
+ -- list types
+ (G.TypeList eTy, G.TypeList aTy) ->
+ typeCheck (G.unListType eTy) (G.unListType aTy)
+ (G.TypeList eTy, G.TypeNonNull (G.NonNullTypeList aTy)) ->
+ typeCheck (G.unListType eTy) (G.unListType aTy)
+
+ -- non null types
+ (G.TypeNonNull (G.NonNullTypeList eTy), G.TypeNonNull (G.NonNullTypeList aTy)) ->
+ typeCheck (G.unListType eTy) (G.unListType aTy)
+ (G.TypeNonNull (G.NonNullTypeNamed eTy), G.TypeNonNull (G.NonNullTypeNamed aTy)) ->
+ eTy == aTy
+ (_, _) -> False
+
+pVar
+ :: ( MonadError QErr m
+ , MonadReader ValidationCtx m)
+ => G.Variable -> m (P a)
+pVar var = do
+ annInpVal <- resolveVar var
+ return . P . Just . Left $ annInpVal
+
+data InputValueParser a m
+ = InputValueParser
+ { getScalar :: a -> m (P J.Value)
+ , getList :: a -> m (P [a])
+ , getObject :: a -> m (P [(G.Name, a)])
+ , getEnum :: a -> m (P G.EnumValue)
+ }
+
+jsonParser :: (MonadError QErr m) => InputValueParser J.Value m
+jsonParser =
+ InputValueParser jScalar jList jObject jEnum
+ where
+ jEnum (J.String t) = pVal $ G.EnumValue $ G.Name t
+ jEnum J.Null = pNull
+ jEnum _ = throwVE "expecting a JSON string for Enum"
+
+ jList (J.Array v) = pVal $ V.toList v
+ jList J.Null = pNull
+ jList _ = throwVE "expecting a JSON array"
+
+ jObject (J.Object m) = pVal [(G.Name t, v) | (t, v) <- Map.toList m]
+ jObject J.Null = pNull
+ jObject _ = throwVE "expecting a JSON object"
+
+ jScalar J.Null = pNull
+ jScalar v = pVal v
+
+valueParser
+ :: ( MonadError QErr m
+ , MonadReader ValidationCtx m)
+ => InputValueParser G.Value m
+valueParser =
+ InputValueParser pScalar pList pObject pEnum
+ where
+ pEnum (G.VVariable var) = pVar var
+ pEnum (G.VEnum e) = pVal e
+ pEnum G.VNull = pNull
+ pEnum _ = throwVE "expecting an enum"
+
+ pList (G.VVariable var) = pVar var
+ pList (G.VList lv) = pVal $ G.unListValue lv
+ pList G.VNull = pNull
+ pList _ = throwVE "expecting a list"
+
+ pObject (G.VVariable var) = pVar var
+ pObject (G.VObject ov) = pVal
+ [(G._ofName oFld, G._ofValue oFld) | oFld <- G.unObjectValue ov]
+ pObject G.VNull = pNull
+ pObject _ = throwVE "expecting an object"
+
+ -- scalar json
+ pScalar (G.VVariable var) = pVar var
+ pScalar G.VNull = pNull
+ pScalar (G.VInt v) = pVal $ J.Number $ fromIntegral v
+ pScalar (G.VFloat v) = pVal $ J.Number $ fromFloatDigits v
+ pScalar (G.VBoolean b) = pVal $ J.Bool b
+ pScalar (G.VString sv) = pVal $ J.String $ G.unStringValue sv
+ pScalar (G.VEnum _) = throwVE "unexpected enum for a scalar"
+ pScalar (G.VObject _) = throwVE "unexpected object for a scalar"
+ pScalar (G.VList _) = throwVE "unexpected array for a scalar"
+
+constValueParser :: (MonadError QErr m) => InputValueParser G.ValueConst m
+constValueParser =
+ InputValueParser pScalar pList pObject pEnum
+ where
+ pEnum (G.VCEnum e) = pVal e
+ pEnum G.VCNull = pNull
+ pEnum _ = throwVE "expecting an enum"
+
+ pList (G.VCList lv) = pVal $ G.unListValue lv
+ pList G.VCNull = pNull
+ pList _ = throwVE "expecting a list"
+
+ pObject (G.VCObject ov) = pVal
+ [(G._ofName oFld, G._ofValue oFld) | oFld <- G.unObjectValue ov]
+ pObject G.VCNull = pNull
+ pObject _ = throwVE "expecting an object"
+
+ -- scalar json
+ pScalar G.VCNull = pNull
+ pScalar (G.VCInt v) = pVal $ J.Number $ fromIntegral v
+ pScalar (G.VCFloat v) = pVal $ J.Number $ fromFloatDigits v
+ pScalar (G.VCBoolean b) = pVal $ J.Bool b
+ pScalar (G.VCString sv) = pVal $ J.String $ G.unStringValue sv
+ pScalar (G.VCEnum _) = throwVE "unexpected enum for a scalar"
+ pScalar (G.VCObject _) = throwVE "unexpected object for a scalar"
+ pScalar (G.VCList _) = throwVE "unexpected array for a scalar"
+
+validateObject
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m
+ )
+ => InputValueParser a m
+ -> InpObjTyInfo -> [(G.Name, a)] -> m AnnGObject
+validateObject valParser tyInfo flds = do
+
+ when (dupFlds /= []) $
+ throwVE $ "when parsing a value of type: " <> showNamedTy (_iotiName tyInfo)
+ <> ", the following fields are duplicated: "
+ <> T.intercalate ", " (map showName dupFlds)
+
+ -- TODO: need to check for required arguments
+
+ fmap Map.fromList $ forM flds $ \(fldName, fldVal) -> do
+ fldTy <- getInpFieldInfo tyInfo fldName
+ convFldVal <- validateInputValue valParser fldTy fldVal
+ return (fldName, convFldVal)
+
+ where
+    -- 'group' only groups adjacent elements, so sort the names first;
+    -- any group with more than one element indicates a duplicated field
+    dupFlds = mapMaybe listToMaybe $ filter ((> 1) . length) $
+      group $ sortBy (comparing G.unName) $ map fst flds
+
+validateNamedTypeVal
+ :: ( MonadReader r m, Has TypeMap r
+ , MonadError QErr m)
+ => InputValueParser a m
+ -> G.NamedType -> a -> m AnnGValue
+validateNamedTypeVal inpValParser nt val = do
+ tyInfo <- getTyInfo nt
+ case tyInfo of
+ -- this should never happen
+ TIObj _ ->
+ throw500 $ "unexpected object type info for: "
+ <> showNamedTy nt
+ TIInpObj ioti ->
+ withParsed (getObject inpValParser) val $ \mObj ->
+ AGObject nt <$> (mapM $ validateObject inpValParser ioti) mObj
+ TIEnum eti ->
+ withParsed (getEnum inpValParser) val $ \mEnumVal ->
+ AGEnum nt <$> (mapM $ validateEnum eti) mEnumVal
+ TIScalar (ScalarTyInfo _ pgColTy) ->
+ withParsed (getScalar inpValParser) val $ \mScalar ->
+ AGScalar pgColTy <$> (mapM $ validateScalar pgColTy) mScalar
+ where
+ validateEnum enumTyInfo enumVal =
+ if Map.member enumVal (_etiValues enumTyInfo)
+ then return enumVal
+ else throwVE $ "unexpected value " <>
+ showName (G.unEnumValue enumVal) <>
+ " for enum: " <> showNamedTy nt
+ validateScalar pgColTy =
+ runAesonParser (parsePGValue pgColTy)
+
+validateList
+ :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
+ => InputValueParser a m
+ -> G.ListType
+ -> a
+ -> m AnnGValue
+validateList inpValParser listTy val =
+ withParsed (getList inpValParser) val $ \lM -> do
+ let baseTy = G.unListType listTy
+ AGArray listTy <$> mapM (mapM (validateInputValue inpValParser baseTy)) lM
+
+validateNonNull
+ :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
+ => InputValueParser a m
+ -> G.NonNullType
+ -> a
+ -> m AnnGValue
+validateNonNull inpValParser nonNullTy val = do
+ parsedVal <- case nonNullTy of
+ G.NonNullTypeNamed nt -> validateNamedTypeVal inpValParser nt val
+ G.NonNullTypeList lt -> validateList inpValParser lt val
+ when (hasNullVal parsedVal) $
+ throwVE $ "unexpected null value for type: " <> G.showGT (G.TypeNonNull nonNullTy)
+ return parsedVal
+
+validateInputValue
+ :: (MonadError QErr m, MonadReader r m, Has TypeMap r)
+ => InputValueParser a m
+ -> G.GType
+ -> a
+ -> m AnnGValue
+validateInputValue inpValParser ty val =
+ case ty of
+ G.TypeNamed nt -> validateNamedTypeVal inpValParser nt val
+ G.TypeList lt -> validateList inpValParser lt val
+ G.TypeNonNull nnt -> validateNonNull inpValParser nnt val
+
+withParsed
+ :: (Monad m)
+ => (val -> m (P specificVal))
+ -> val
+ -> (Maybe specificVal -> m AnnGValue)
+ -> m AnnGValue
+withParsed valParser val fn = do
+ parsedVal <- valParser val
+ case unP parsedVal of
+ Nothing -> fn Nothing
+ Just (Right a) -> fn $ Just a
+ Just (Left annVal) -> return annVal
diff --git a/server/src-lib/Hasura/GraphQL/Validate/Types.hs b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
new file mode 100644
index 00000000..c844d958
--- /dev/null
+++ b/server/src-lib/Hasura/GraphQL/Validate/Types.hs
@@ -0,0 +1,303 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE NoImplicitPrelude #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.GraphQL.Validate.Types
+ ( InpValInfo(..)
+ , ParamMap
+ , ObjFldInfo(..)
+ , ObjFieldMap
+ , ObjTyInfo(..)
+ , mkObjTyInfo
+ , FragDef(..)
+ , FragDefMap
+ , AnnVarVals
+ , EnumTyInfo(..)
+ , EnumValInfo(..)
+ , InpObjFldMap
+ , InpObjTyInfo(..)
+ , ScalarTyInfo(..)
+ , DirectiveInfo(..)
+ , defaultDirectives
+ , defaultSchema
+ , TypeInfo(..)
+ , isObjTy
+ , getObjTyM
+ , mkScalarTy
+ , pgColTyToScalar
+ , getNamedTy
+ , mkTyInfoMap
+ , fromTyDef
+ , fromTyDefQ
+ , fromSchemaDocQ
+ , TypeMap
+ , AnnGValue(..)
+ , AnnGObject
+ , hasNullVal
+ , getAnnInpValKind
+ , getAnnInpValTy
+ , module Hasura.GraphQL.Utils
+ ) where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text as T
+import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Language.GraphQL.Draft.TH as G
+import qualified Language.Haskell.TH.Syntax as TH
+
+import Hasura.GraphQL.Utils
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+data EnumValInfo
+ = EnumValInfo
+ { _eviDesc :: !(Maybe G.Description)
+ , _eviVal :: !G.EnumValue
+ , _eviIsDeprecated :: !Bool
+ } deriving (Show, Eq, TH.Lift)
+
+fromEnumValDef :: G.EnumValueDefinition -> EnumValInfo
+fromEnumValDef (G.EnumValueDefinition descM val _) =
+ EnumValInfo descM val False
+
+data EnumTyInfo
+ = EnumTyInfo
+ { _etiDesc :: !(Maybe G.Description)
+ , _etiName :: !G.NamedType
+ , _etiValues :: !(Map.HashMap G.EnumValue EnumValInfo)
+ } deriving (Show, Eq, TH.Lift)
+
+fromEnumTyDef :: G.EnumTypeDefinition -> EnumTyInfo
+fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) =
+ EnumTyInfo descM (G.NamedType n) $ Map.fromList
+ [(G._evdName valDef, fromEnumValDef valDef) | valDef <- valDefs]
+
+data InpValInfo
+ = InpValInfo
+ { _iviDesc :: !(Maybe G.Description)
+ , _iviName :: !G.Name
+ , _iviType :: !G.GType
+ -- TODO, handle default values
+ } deriving (Show, Eq, TH.Lift)
+
+fromInpValDef :: G.InputValueDefinition -> InpValInfo
+fromInpValDef (G.InputValueDefinition descM n ty _) =
+ InpValInfo descM n ty
+
+type ParamMap = Map.HashMap G.Name InpValInfo
+
+data ObjFldInfo
+ = ObjFldInfo
+ { _fiDesc :: !(Maybe G.Description)
+ , _fiName :: !G.Name
+ , _fiParams :: !ParamMap
+ , _fiTy :: !G.GType
+ } deriving (Show, Eq, TH.Lift)
+
+fromFldDef :: G.FieldDefinition -> ObjFldInfo
+fromFldDef (G.FieldDefinition descM n args ty _) =
+ ObjFldInfo descM n params ty
+ where
+ params = Map.fromList [(G._ivdName arg, fromInpValDef arg) | arg <- args]
+
+type ObjFieldMap = Map.HashMap G.Name ObjFldInfo
+
+data ObjTyInfo
+ = ObjTyInfo
+ { _otiDesc :: !(Maybe G.Description)
+ , _otiName :: !G.NamedType
+ , _otiFields :: !ObjFieldMap
+ } deriving (Show, Eq, TH.Lift)
+
+mkObjTyInfo
+ :: Maybe G.Description -> G.NamedType -> ObjFieldMap -> ObjTyInfo
+mkObjTyInfo descM ty flds =
+ ObjTyInfo descM ty $ Map.insert (_fiName typenameFld) typenameFld flds
+
+typenameFld :: ObjFldInfo
+typenameFld =
+ ObjFldInfo (Just desc) "__typename" Map.empty $
+ G.toGT $ G.toNT $ G.NamedType "String"
+ where
+ desc = "The name of the current Object type at runtime"
+
+fromObjTyDef :: G.ObjectTypeDefinition -> ObjTyInfo
+fromObjTyDef (G.ObjectTypeDefinition descM n _ _ flds) =
+ mkObjTyInfo descM (G.NamedType n) $
+ Map.fromList [(G._fldName fld, fromFldDef fld) | fld <- flds]
+
+type InpObjFldMap = Map.HashMap G.Name InpValInfo
+
+data InpObjTyInfo
+ = InpObjTyInfo
+ { _iotiDesc :: !(Maybe G.Description)
+ , _iotiName :: !G.NamedType
+ , _iotiFields :: !InpObjFldMap
+ } deriving (Show, Eq, TH.Lift)
+
+fromInpObjTyDef :: G.InputObjectTypeDefinition -> InpObjTyInfo
+fromInpObjTyDef (G.InputObjectTypeDefinition descM n _ inpFlds) =
+ InpObjTyInfo descM (G.NamedType n) $
+ Map.fromList [(G._ivdName inpFld, fromInpValDef inpFld) | inpFld <- inpFlds]
+
+data ScalarTyInfo
+ = ScalarTyInfo
+ { _stiDesc :: !(Maybe G.Description)
+ , _stiType :: !PGColType
+ } deriving (Show, Eq, TH.Lift)
+
+fromScalarTyDef :: G.ScalarTypeDefinition -> Either Text ScalarTyInfo
+fromScalarTyDef (G.ScalarTypeDefinition descM n _) =
+ ScalarTyInfo descM <$> case n of
+ "Int" -> return PGInteger
+ "Float" -> return PGFloat
+ "String" -> return PGText
+ "Boolean" -> return PGBoolean
+ -- TODO: is this correct?
+ "ID" -> return PGText
+ _ -> throwError $ "unexpected type: " <> G.unName n
+
+data TypeInfo
+ = TIScalar !ScalarTyInfo
+ | TIObj !ObjTyInfo
+ | TIEnum !EnumTyInfo
+ | TIInpObj !InpObjTyInfo
+ deriving (Show, Eq, TH.Lift)
+
+isObjTy :: TypeInfo -> Bool
+isObjTy = \case
+ (TIObj _) -> True
+ _ -> False
+
+getObjTyM :: TypeInfo -> Maybe ObjTyInfo
+getObjTyM = \case
+ (TIObj t) -> return t
+ _ -> Nothing
+
+-- map postgres types to builtin scalars
+pgColTyToScalar :: PGColType -> Text
+pgColTyToScalar = \case
+ PGInteger -> "Int"
+ PGBoolean -> "Boolean"
+ PGFloat -> "Float"
+ PGText -> "String"
+ t -> T.pack $ show t
+
+mkScalarTy :: PGColType -> G.NamedType
+mkScalarTy =
+ G.NamedType . G.Name . pgColTyToScalar
+
+getNamedTy :: TypeInfo -> G.NamedType
+getNamedTy = \case
+ TIScalar t -> mkScalarTy $ _stiType t
+ TIObj t -> _otiName t
+ TIEnum t -> _etiName t
+ TIInpObj t -> _iotiName t
+
+mkTyInfoMap :: [TypeInfo] -> TypeMap
+mkTyInfoMap tyInfos =
+ Map.fromList [(getNamedTy tyInfo, tyInfo) | tyInfo <- tyInfos]
+
+fromTyDef :: G.TypeDefinition -> Either Text TypeInfo
+fromTyDef = \case
+ G.TypeDefinitionScalar t -> TIScalar <$> fromScalarTyDef t
+ G.TypeDefinitionObject t -> return $ TIObj $ fromObjTyDef t
+ G.TypeDefinitionInterface t ->
+ throwError $ "unexpected interface: " <> showName (G._itdName t)
+ G.TypeDefinitionUnion t ->
+ throwError $ "unexpected union: " <> showName (G._utdName t)
+ G.TypeDefinitionEnum t -> return $ TIEnum $ fromEnumTyDef t
+ G.TypeDefinitionInputObject t -> return $ TIInpObj $ fromInpObjTyDef t
+
+fromTyDefQ :: G.TypeDefinition -> TH.Q TH.Exp
+fromTyDefQ tyDef = case fromTyDef tyDef of
+ Left e -> fail $ T.unpack e
+ Right t -> TH.lift t
+
+fromSchemaDocQ :: G.SchemaDocument -> TH.Q TH.Exp
+fromSchemaDocQ (G.SchemaDocument tyDefs) =
+ TH.ListE <$> mapM fromTyDefQ tyDefs
+
+defaultSchema :: G.SchemaDocument
+defaultSchema = $(G.parseSchemaDocQ "src-rsr/schema.graphql")
+
+-- fromBaseSchemaFileQ :: FilePath -> TH.Q TH.Exp
+-- fromBaseSchemaFileQ fp =
+-- fromSchemaDocQ $(G.parseSchemaDocQ fp)
+
+type TypeMap = Map.HashMap G.NamedType TypeInfo
+
+data DirectiveInfo
+ = DirectiveInfo
+ { _diDescription :: !(Maybe G.Description)
+ , _diName :: !G.Name
+ , _diParams :: !ParamMap
+ , _diLocations :: ![G.DirectiveLocation]
+ } deriving (Show, Eq)
+
+-- TODO: generate this via Template Haskell once we have a parser for directive definitions
+-- directive @skip(if: Boolean!) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT
+defaultDirectives :: [DirectiveInfo]
+defaultDirectives =
+ [mkDirective "skip", mkDirective "include"]
+ where
+ mkDirective n = DirectiveInfo Nothing n args dirLocs
+ args = Map.singleton "if" $ InpValInfo Nothing "if" $
+ G.TypeNamed $ G.NamedType $ G.Name "Boolean"
+ dirLocs = map G.DLExecutable
+ [G.EDLFIELD, G.EDLFRAGMENT_SPREAD, G.EDLINLINE_FRAGMENT]
+
+data FragDef
+ = FragDef
+ { _fdName :: !G.Name
+ , _fdTyInfo :: !ObjTyInfo
+ , _fdSelSet :: !G.SelectionSet
+ } deriving (Show, Eq)
+
+type FragDefMap = Map.HashMap G.Name FragDef
+
+type AnnVarVals =
+ Map.HashMap G.Variable AnnGValue
+
+type AnnGObject = Map.HashMap G.Name AnnGValue
+
+data AnnGValue
+ = AGScalar !PGColType !(Maybe PGColValue)
+ | AGEnum !G.NamedType !(Maybe G.EnumValue)
+ | AGObject !G.NamedType !(Maybe AnnGObject)
+ | AGArray !G.ListType !(Maybe [AnnGValue])
+ deriving (Show, Eq)
+
+instance J.ToJSON AnnGValue where
+  -- TODO: implement a real encoding; a sketch:
+  -- toJSON (AGScalar ty valM) = J.toJSON [J.toJSON ty, J.toJSON valM]
+  toJSON = const J.Null
+
+hasNullVal :: AnnGValue -> Bool
+hasNullVal = \case
+ AGScalar _ Nothing -> True
+ AGEnum _ Nothing -> True
+ AGObject _ Nothing -> True
+ AGArray _ Nothing -> True
+ _ -> False
+
+getAnnInpValKind :: AnnGValue -> Text
+getAnnInpValKind = \case
+ AGScalar _ _ -> "scalar"
+ AGEnum _ _ -> "enum"
+ AGObject _ _ -> "object"
+ AGArray _ _ -> "array"
+
+getAnnInpValTy :: AnnGValue -> G.GType
+getAnnInpValTy = \case
+ AGScalar pct _ -> G.TypeNamed $ G.NamedType $ G.Name $ T.pack $ show pct
+ AGEnum nt _ -> G.TypeNamed nt
+ AGObject nt _ -> G.TypeNamed nt
+ AGArray nt _ -> G.TypeList nt
diff --git a/server/src-lib/Hasura/Prelude.hs b/server/src-lib/Hasura/Prelude.hs
new file mode 100644
index 00000000..641e98a6
--- /dev/null
+++ b/server/src-lib/Hasura/Prelude.hs
@@ -0,0 +1,23 @@
+module Hasura.Prelude
+ ( module M
+ ) where
+
+import Control.Applicative as M ((<|>))
+import Control.Monad as M (void, when)
+import Control.Monad.Except as M
+import Control.Monad.Identity as M
+import Control.Monad.Reader as M
+import Control.Monad.State as M
+import Data.Bool as M (bool)
+import Data.Either as M (lefts, partitionEithers, rights)
+import Data.Foldable as M (toList)
+import Data.Hashable as M (Hashable)
+import Data.List as M (foldl', group, sortBy, find)
+import Data.Maybe as M (catMaybes, fromMaybe, isJust,
+ listToMaybe, mapMaybe,
+ maybeToList)
+import Data.Ord as M (comparing)
+import Data.Semigroup as M (Semigroup (..))
+import Data.Text as M (Text)
+import Prelude as M hiding (fail, init, lookup)
+import Text.Read as M (readEither, readMaybe)
diff --git a/server/src-lib/Hasura/RQL/DDL/Deps.hs b/server/src-lib/Hasura/RQL/DDL/Deps.hs
new file mode 100644
index 00000000..b80b1d39
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Deps.hs
@@ -0,0 +1,109 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+
+module Hasura.RQL.DDL.Deps
+ ( purgeRel
+ , parseDropNotice
+ , getIndirectDeps
+ , reportDeps
+ , reportDepsExt
+ )
+ where
+
+import Hasura.Prelude
+
+import qualified Data.HashSet as HS
+import qualified Data.Text as T
+import qualified Database.PG.Query as Q
+
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+purgeRel :: QualifiedTable -> RelName -> Q.Tx ()
+purgeRel (QualifiedTable sn tn) rn =
+ Q.unitQ [Q.sql|
+ DELETE FROM hdb_catalog.hdb_relationship
+ WHERE table_schema = $1
+ AND table_name = $2
+ AND rel_name = $3
+ |] (sn, tn, rn) False
+
+reportDeps :: (QErrM m) => [SchemaObjId] -> m ()
+reportDeps deps =
+ throw400 DependencyError $
+ "cannot drop due to the following dependent objects : "
+ <> reportSchemaObjs deps
+
+reportDepsExt :: (QErrM m) => [SchemaObjId] -> [T.Text] -> m ()
+reportDepsExt deps unknownDeps =
+ throw400 DependencyError $
+ "cannot drop due to the following dependent objects : " <> depObjsTxt
+ where
+ depObjsTxt = T.intercalate ", " (reportSchemaObjs deps:unknownDeps)
+
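+-- Parse the NOTICE produced by a cascading drop. A (hypothetical) notice:
+--
+--   NOTICE: drop cascades to 2 other objects
+--   DETAIL: drop cascades to view hdb_views.user__select__public__author
+--   drop cascades to constraint article_author_id_fkey on table public.article
+--
+-- parses to a view object and a constraint object; any line matching neither
+-- pattern is returned as 'Left' for the caller to report verbatim.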
+parseDropNotice :: (QErrM m) => T.Text -> m [Either T.Text SchemaObjId]
+parseDropNotice t = do
+ cascadeLines <- getCascadeLines
+ mapM parseCascadeLine cascadeLines
+ where
+ dottedTxtToQualTable dt =
+ case T.split (=='.') dt of
+ [tn] -> return $ QualifiedTable publicSchema $ TableName tn
+ [sn, tn] -> return $ QualifiedTable (SchemaName sn) $ TableName tn
+ _ -> throw400 ParseFailed $ "parsing dotted table failed : " <> dt
+
+ getCascadeLines = do
+ detailLines <- case T.stripPrefix "NOTICE:" t of
+ Just rest -> case T.splitOn "DETAIL:" $ T.strip rest of
+ [singleDetail] -> return [singleDetail]
+ [_, detailTxt] -> return $ T.lines $ T.strip detailTxt
+ _ -> throw500 "splitOn DETAIL has unexpected structure"
+ Nothing -> throw500 "unexpected beginning of notice"
+ let cascadeLines = mapMaybe (T.stripPrefix "drop cascades to") detailLines
+ when (length detailLines /= length cascadeLines) $
+ throw500 "unexpected lines in drop notice"
+ return $ map T.strip cascadeLines
+
+ parseCascadeLine cl
+ | T.isPrefixOf "view" cl =
+ case T.words cl of
+ [_, vn] -> do
+ qt <- dottedTxtToQualTable vn
+ return $ Right $ SOTable qt
+ _ -> throw500 $ "failed to parse view cascade line : " <> cl
+ | T.isPrefixOf "constraint" cl =
+ case T.words cl of
+ [_, cn, _, _, tn] -> do
+ qt <- dottedTxtToQualTable tn
+ return $ Right $ SOTableObj qt $
+ TOCons $ ConstraintName cn
+ _ -> throw500 $ "failed to parse constraint cascade line : " <> cl
+ | otherwise = return $ Left cl
+
+getPGDeps :: Q.Tx () -> Q.TxE QErr [Either T.Text SchemaObjId]
+getPGDeps tx = do
+ dropNotices <- Q.catchE defaultTxErrorHandler $ do
+ Q.unitQ "SAVEPOINT hdb_get_pg_deps" () False
+ dropNotices <- snd <$> Q.withNotices tx
+ Q.unitQ "ROLLBACK TO SAVEPOINT hdb_get_pg_deps" () False
+ Q.unitQ "RELEASE SAVEPOINT hdb_get_pg_deps" () False
+ return dropNotices
+ case dropNotices of
+ [] -> return []
+ [notice] -> parseDropNotice notice
+ _ -> throw500 "unexpected number of notices when getting dependencies"
+
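+-- Compute what else would be dropped, beyond the initial dependencies, by
+-- trial-running the drop inside a savepoint. A (hypothetical) call:
+--
+--   (indirect, unparsed) <- getIndirectDeps directDeps $
+--     Q.unitQ "DROP VIEW hdb_views.some_view CASCADE" () False
+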
+getIndirectDeps
+ :: (CacheRM m, MonadTx m)
+ => [SchemaObjId] -> Q.Tx ()
+ -> m ([SchemaObjId], [T.Text])
+getIndirectDeps initDeps tx = do
+ sc <- askSchemaCache
+  -- trial-run the drop SQL inside a savepoint to get the Postgres-level dependencies
+ pgDeps <- liftTx $ getPGDeps tx
+ let (unparsedLines, parsedObjIds) = partitionEithers pgDeps
+ indirectDeps = HS.fromList $ parsedObjIds <>
+ concatMap (getDependentObjs sc) parsedObjIds
+ newDeps = indirectDeps `HS.difference` (HS.fromList initDeps)
+ return (HS.toList newDeps, unparsedLines)
diff --git a/server/src-lib/Hasura/RQL/DDL/Metadata.hs b/server/src-lib/Hasura/RQL/DDL/Metadata.hs
new file mode 100644
index 00000000..cbc44b92
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Metadata.hs
@@ -0,0 +1,361 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DDL.Metadata
+ ( ReplaceMetadata(..)
+ , TableMeta(..)
+ , tmObjectRelationships
+ , tmArrayRelationships
+ , tmInsertPermissions
+ , tmSelectPermissions
+ , tmUpdatePermissions
+ , tmDeletePermissions
+
+ , mkTableMeta
+ , applyQP1
+ , applyQP2
+
+ , DumpInternalState(..)
+
+ , ExportMetadata(..)
+ , fetchMetadata
+
+ , ClearMetadata(..)
+ , clearMetadata
+ ) where
+
+import Control.Lens
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.HashSet as HS
+import qualified Data.List as L
+import qualified Data.Text as T
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Utils
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.RQL.DDL.Permission as DP
+import qualified Hasura.RQL.DDL.QueryTemplate as DQ
+import qualified Hasura.RQL.DDL.Relationship as DR
+import qualified Hasura.RQL.DDL.Schema.Table as DT
+
+data TableMeta
+ = TableMeta
+ { _tmTable :: !QualifiedTable
+ , _tmObjectRelationships :: ![DR.ObjRelDef]
+ , _tmArrayRelationships :: ![DR.ArrRelDef]
+ , _tmInsertPermissions :: ![DP.InsPermDef]
+ , _tmSelectPermissions :: ![DP.SelPermDef]
+ , _tmUpdatePermissions :: ![DP.UpdPermDef]
+ , _tmDeletePermissions :: ![DP.DelPermDef]
+ } deriving (Show, Eq, Lift)
+
+mkTableMeta :: QualifiedTable -> TableMeta
+mkTableMeta qt =
+ TableMeta qt [] [] [] [] [] []
+
+makeLenses ''TableMeta
+
+instance FromJSON TableMeta where
+ parseJSON (Object o) = do
+ unless (null unexpectedKeys) $
+ fail $ "unexpected keys when parsing TableMetadata : "
+ <> show (HS.toList unexpectedKeys)
+
+ TableMeta
+ <$> o .: tableKey
+ <*> o .:? orKey .!= []
+ <*> o .:? arKey .!= []
+ <*> o .:? ipKey .!= []
+ <*> o .:? spKey .!= []
+ <*> o .:? upKey .!= []
+ <*> o .:? dpKey .!= []
+
+ where
+ tableKey = "table"
+ orKey = "object_relationships"
+ arKey = "array_relationships"
+ ipKey = "insert_permissions"
+ spKey = "select_permissions"
+ upKey = "update_permissions"
+ dpKey = "delete_permissions"
+
+ unexpectedKeys =
+ HS.fromList (M.keys o) `HS.difference` expectedKeySet
+
+ expectedKeySet =
+ HS.fromList [ tableKey, orKey, arKey, ipKey
+ , spKey, upKey, dpKey
+ ]
+
+ parseJSON _ =
+ fail "expecting an Object for TableMetadata"
+
+$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''TableMeta)
+
+data ClearMetadata
+ = ClearMetadata
+ deriving (Show, Eq, Lift)
+$(deriveToJSON defaultOptions ''ClearMetadata)
+
+instance FromJSON ClearMetadata where
+ parseJSON _ = return ClearMetadata
+
+clearMetadata :: Q.TxE QErr ()
+clearMetadata = Q.catchE defaultTxErrorHandler $ do
+ Q.unitQ "DELETE FROM hdb_catalog.hdb_query_template WHERE is_system_defined <> 'true'" () False
+ Q.unitQ "DELETE FROM hdb_catalog.hdb_permission WHERE is_system_defined <> 'true'" () False
+ Q.unitQ "DELETE FROM hdb_catalog.hdb_relationship WHERE is_system_defined <> 'true'" () False
+ Q.unitQ "DELETE FROM hdb_catalog.hdb_table WHERE is_system_defined <> 'true'" () False
+ Q.unitQ clearHdbViews () False
+
+instance HDBQuery ClearMetadata where
+
+ type Phase1Res ClearMetadata = ()
+ phaseOne _ = adminOnly
+
+ phaseTwo _ _ = do
+ newSc <- liftTx $ clearMetadata >> DT.buildSchemaCache
+ writeSchemaCache newSc
+ return successMsg
+
+ schemaCachePolicy = SCPReload
+
+data ReplaceMetadata
+ = ReplaceMetadata
+ { aqTables :: ![TableMeta]
+ , aqQueryTemplates :: ![DQ.CreateQueryTemplate]
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ReplaceMetadata)
+
+applyQP1 :: ReplaceMetadata -> P1 ()
+applyQP1 (ReplaceMetadata tables templates) = do
+
+ adminOnly
+
+ withPathK "tables" $ do
+
+ checkMultipleDecls "tables" $ map _tmTable tables
+
+ -- process each table
+ void $ indexedForM tables $ \table -> withTableName (table ^. tmTable) $ do
+ let allRels = map DR.rdName (table ^. tmObjectRelationships) <>
+ map DR.rdName (table ^. tmArrayRelationships)
+
+ insPerms = map DP.pdRole $ table ^. tmInsertPermissions
+ selPerms = map DP.pdRole $ table ^. tmSelectPermissions
+ updPerms = map DP.pdRole $ table ^. tmUpdatePermissions
+ delPerms = map DP.pdRole $ table ^. tmDeletePermissions
+
+ checkMultipleDecls "relationships" allRels
+ checkMultipleDecls "insert permissions" insPerms
+ checkMultipleDecls "select permissions" selPerms
+ checkMultipleDecls "update permissions" updPerms
+ checkMultipleDecls "delete permissions" delPerms
+
+ withPathK "queryTemplates" $
+ checkMultipleDecls "query templates" $ map DQ.cqtName templates
+
+ where
+ withTableName qt = withPathK (qualTableToTxt qt)
+
+ checkMultipleDecls t l = do
+ let dups = getDups l
+ unless (null dups) $
+ throw400 AlreadyExists $ "multiple declarations exist for the following " <> t <> " : "
+ <> T.pack (show dups)
+
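+    -- elements that occur more than once: the list minus its de-duplicated self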
+ getDups l =
+ l L.\\ HS.toList (HS.fromList l)
+
+applyQP2 :: (UserInfoM m, P2C m) => ReplaceMetadata -> m RespBody
+applyQP2 (ReplaceMetadata tables templates) = do
+
+ defaultSchemaCache <- liftTx $ clearMetadata >> DT.buildSchemaCache
+ writeSchemaCache defaultSchemaCache
+
+ withPathK "tables" $ do
+
+ -- tables and views
+ indexedForM_ (map _tmTable tables) $ \tableName ->
+ void $ DT.trackExistingTableOrViewP2 tableName
+
+ -- Relationships
+ indexedForM_ tables $ \table -> do
+ withPathK "object_relationships" $
+ indexedForM_ (table ^. tmObjectRelationships) $ \objRel ->
+ DR.objRelP2 (table ^. tmTable) objRel
+ withPathK "array_relationships" $
+ indexedForM_ (table ^. tmArrayRelationships) $ \arrRel ->
+ DR.arrRelP2 (table ^. tmTable) arrRel
+
+ -- Permissions
+ indexedForM_ tables $ \table -> do
+ let tableName = table ^. tmTable
+ tabInfo <- modifyErrAndSet500 ("apply " <> ) $ askTabInfo tableName
+ withPathK "insert_permissions" $ processPerms tabInfo $
+ table ^. tmInsertPermissions
+ withPathK "select_permissions" $ processPerms tabInfo $
+ table ^. tmSelectPermissions
+ withPathK "update_permissions" $ processPerms tabInfo $
+ table ^. tmUpdatePermissions
+ withPathK "delete_permissions" $ processPerms tabInfo $
+ table ^. tmDeletePermissions
+
+ -- query templates
+ withPathK "queryTemplates" $
+ indexedForM_ templates $ \template -> do
+ qti <- DQ.createQueryTemplateP1 template
+ void $ DQ.createQueryTemplateP2 template qti
+
+ return successMsg
+
+ where
+ processPerms tabInfo perms =
+ indexedForM_ perms $ \permDef -> do
+ permInfo <- DP.addPermP1 tabInfo permDef
+ DP.addPermP2 (tiName tabInfo) permDef permInfo
+
+
+instance HDBQuery ReplaceMetadata where
+
+ type Phase1Res ReplaceMetadata = ()
+ phaseOne = applyQP1
+
+ phaseTwo q _ = applyQP2 q
+
+ schemaCachePolicy = SCPReload
+
+data ExportMetadata
+ = ExportMetadata
+ deriving (Show, Eq, Lift)
+
+instance FromJSON ExportMetadata where
+ parseJSON _ = return ExportMetadata
+
+$(deriveToJSON defaultOptions ''ExportMetadata)
+
+fetchMetadata :: Q.TxE QErr ReplaceMetadata
+fetchMetadata = do
+ tables <- Q.catchE defaultTxErrorHandler fetchTables
+
+ let qts = map (uncurry QualifiedTable) tables
+ tableMetaMap = M.fromList $ zip qts $ map mkTableMeta qts
+
+ -- Fetch all the relationships
+ relationships <- Q.catchE defaultTxErrorHandler fetchRelationships
+
+ objRelDefs <- mkRelDefs ObjRel relationships
+ arrRelDefs <- mkRelDefs ArrRel relationships
+
+ -- Fetch all the permissions
+ permissions <- Q.catchE defaultTxErrorHandler fetchPermissions
+
+ -- Parse all the permissions
+ insPermDefs <- mkPermDefs PTInsert permissions
+ selPermDefs <- mkPermDefs PTSelect permissions
+ updPermDefs <- mkPermDefs PTUpdate permissions
+ delPermDefs <- mkPermDefs PTDelete permissions
+
+ -- Fetch all the query templates
+ qTmpltRows <- Q.catchE defaultTxErrorHandler fetchQTemplates
+
+ qTmpltDefs <- forM qTmpltRows $ \(qtn, Q.AltJ qtDefVal, mComment) -> do
+ qtDef <- decodeValue qtDefVal
+ return $ DQ.CreateQueryTemplate qtn qtDef mComment
+
+ let (_, postRelMap) = flip runState tableMetaMap $ do
+ modMetaMap tmObjectRelationships objRelDefs
+ modMetaMap tmArrayRelationships arrRelDefs
+ modMetaMap tmInsertPermissions insPermDefs
+ modMetaMap tmSelectPermissions selPermDefs
+ modMetaMap tmUpdatePermissions updPermDefs
+ modMetaMap tmDeletePermissions delPermDefs
+
+ return $ ReplaceMetadata (M.elems postRelMap) qTmpltDefs
+
+ where
+
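+    -- prepend each parsed definition onto the corresponding table's entry
+    -- in the metadata map, keyed by qualified table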
+ modMetaMap l xs = do
+ st <- get
+      put $ foldr (\(qt, dfn) b -> b & at qt . _Just . l %~ (:) dfn) st xs
+
+ mkPermDefs pt = mapM permRowToDef . filter (\pr -> pr ^. _4 == pt)
+
+ permRowToDef (sn, tn, rn, _, Q.AltJ pDef, mComment) = do
+ perm <- decodeValue pDef
+ return (QualifiedTable sn tn, DP.PermDef rn perm mComment)
+
+ mkRelDefs rt = mapM relRowToDef . filter (\rr -> rr ^. _4 == rt)
+
+ relRowToDef (sn, tn, rn, _, Q.AltJ rDef, mComment) = do
+ using <- decodeValue rDef
+ return (QualifiedTable sn tn, DR.RelDef rn using mComment)
+
+ fetchTables =
+ Q.listQ [Q.sql|
+      SELECT table_schema, table_name FROM hdb_catalog.hdb_table
+ WHERE is_system_defined = 'false'
+ |] () False
+
+ fetchRelationships =
+ Q.listQ [Q.sql|
+ SELECT table_schema, table_name, rel_name, rel_type, rel_def::json, comment
+ FROM hdb_catalog.hdb_relationship
+ WHERE is_system_defined = 'false'
+ |] () False
+
+ fetchPermissions =
+ Q.listQ [Q.sql|
+ SELECT table_schema, table_name, role_name, perm_type, perm_def::json, comment
+ FROM hdb_catalog.hdb_permission
+ WHERE is_system_defined = 'false'
+ |] () False
+
+ fetchQTemplates =
+ Q.listQ [Q.sql|
+ SELECT template_name, template_defn :: json, comment
+ FROM hdb_catalog.hdb_query_template
+ WHERE is_system_defined = 'false'
+ |] () False
+
+instance HDBQuery ExportMetadata where
+
+ type Phase1Res ExportMetadata = ()
+ phaseOne _ = adminOnly
+
+ phaseTwo _ _ = encode <$> liftTx fetchMetadata
+
+ schemaCachePolicy = SCPNoChange
+
+data DumpInternalState
+ = DumpInternalState
+ deriving (Show, Eq, Lift)
+
+instance FromJSON DumpInternalState where
+ parseJSON _ = return DumpInternalState
+
+$(deriveToJSON defaultOptions ''DumpInternalState)
+
+instance HDBQuery DumpInternalState where
+
+ type Phase1Res DumpInternalState = ()
+ phaseOne _ = adminOnly
+
+ phaseTwo _ _ = do
+ sc <- askSchemaCache
+ return $ encode sc
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DDL/Permission.hs b/server/src-lib/Hasura/RQL/DDL/Permission.hs
new file mode 100644
index 00000000..bd8f3db0
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Permission.hs
@@ -0,0 +1,413 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DDL.Permission
+ ( CreatePerm
+ , SetPermComment(..)
+ , purgePerm
+ , PermDef(..)
+
+ , InsPerm(..)
+ , InsPermDef
+ , CreateInsPerm
+ , clearInsInfra
+ , buildInsInfra
+ , buildInsPermInfo
+ , DropInsPerm
+ , dropInsPermP2
+
+ , SelPerm(..)
+ , SelPermDef
+ , CreateSelPerm
+ , buildSelPermInfo
+ , DropSelPerm
+ , dropSelPermP2
+
+ , UpdPerm(..)
+ , UpdPermDef
+ , CreateUpdPerm
+ , buildUpdPermInfo
+ , DropUpdPerm
+ , dropUpdPermP2
+
+ , DelPerm(..)
+ , DelPermDef
+ , CreateDelPerm
+ , buildDelPermInfo
+ , DropDelPerm
+ , dropDelPermP2
+
+ , IsPerm(..)
+ , addPermP1
+ , addPermP2
+ ) where
+
+import Hasura.RQL.DDL.Permission.Internal
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.HashSet as HS
+import qualified Data.Text as T
+
+-- Insert permission
+data InsPerm
+ = InsPerm
+ { icCheck :: !BoolExp
+ , icAllowUpsert :: !(Maybe Bool)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''InsPerm)
+
+type InsPermDef = PermDef InsPerm
+type CreateInsPerm = CreatePerm InsPerm
+
+buildViewName :: QualifiedTable -> RoleName -> PermType -> QualifiedTable
+buildViewName (QualifiedTable sn tn) (RoleName rTxt) pt =
+ QualifiedTable hdbViewsSchema $ TableName
+ (rTxt <> "__" <> T.pack (show pt) <> "__" <> snTxt <> "__" <> tnTxt)
+ where
+ hdbViewsSchema = SchemaName "hdb_views"
+ snTxt = getSchemaTxt sn
+ tnTxt = getTableTxt tn
+
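+-- A (hypothetical) example: for role "user", PTInsert and table
+-- public.author, 'buildViewName' yields hdb_views.user__insert__public__author
+-- (assuming PermType's Show renders PTInsert as "insert"), and 'buildView'
+-- then emits:
+--
+--   CREATE VIEW "hdb_views"."user__insert__public__author"
+--   AS SELECT * FROM "public"."author"
+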
+buildView :: QualifiedTable -> QualifiedTable -> Q.Query
+buildView tn vn =
+ Q.fromBuilder $ mconcat
+ [ BB.string7 "CREATE VIEW " <> toSQL vn
+ , BB.string7 " AS SELECT * FROM " <> toSQL tn
+ ]
+
+dropView :: QualifiedTable -> Q.Tx ()
+dropView vn =
+ Q.unitQ dropViewS () False
+ where
+ dropViewS = Q.fromBuilder $
+ BB.string7 "DROP VIEW " <> toSQL vn
+
+buildInsTrig :: QualifiedTable -> Q.Query
+buildInsTrig qt@(QualifiedTable _ tn) =
+ Q.fromBuilder $ mconcat
+ [ BB.string7 "CREATE TRIGGER " <> toSQL tn
+ , BB.string7 " INSTEAD OF INSERT ON " <> toSQL qt
+ , BB.string7 " FOR EACH ROW EXECUTE PROCEDURE "
+ , toSQL qt <> BB.string7 "();"
+ ]
+
+dropInsTrigFn :: QualifiedTable -> Q.Query
+dropInsTrigFn fn =
+ Q.fromBuilder $ BB.string7 "DROP FUNCTION " <> toSQL fn <> "()"
+
+buildInsTrigFn :: QualifiedTable -> QualifiedTable -> S.BoolExp -> Q.Query
+buildInsTrigFn fn tn be =
+ Q.fromBuilder $ mconcat
+ [ BB.string7 "CREATE OR REPLACE FUNCTION " <> toSQL fn
+ , BB.string7 "() RETURNS trigger LANGUAGE plpgsql AS $$ "
+ , BB.string7 "DECLARE r " <> toSQL tn <> "%ROWTYPE; "
+ , BB.string7 "BEGIN "
+ , BB.string7 "IF (" <> toSQL be <> BB.string7 ") "
+ , BB.string7 "THEN INSERT INTO " <> toSQL tn
+ , BB.string7 " VALUES (NEW.*) RETURNING * INTO r; RETURN r; "
+ , BB.string7 "ELSE RAISE check_violation using message = 'insert check constraint failed'; return NULL;"
+ , BB.string7 "END IF; "
+ , BB.string7 "END "
+ , BB.string7 "$$;"
+ ]
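+
+-- A rough (hypothetical) rendering of the generated trigger function for
+-- view "hdb_views"."user__insert__public__author" over "public"."author":
+--
+--   CREATE OR REPLACE FUNCTION "hdb_views"."user__insert__public__author"()
+--   RETURNS trigger LANGUAGE plpgsql AS $$
+--   DECLARE r "public"."author"%ROWTYPE;
+--   BEGIN
+--     IF (<check expression over NEW>) THEN
+--       INSERT INTO "public"."author" VALUES (NEW.*) RETURNING * INTO r; RETURN r;
+--     ELSE RAISE check_violation USING message = 'insert check constraint failed'; RETURN NULL;
+--     END IF;
+--   END $$;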
+
+buildInsPermInfo
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> PermDef InsPerm
+ -> m InsPermInfo
+buildInsPermInfo tabInfo (PermDef rn (InsPerm chk upsrt) _) = do
+ (be, beDeps) <- withPathK "check" $
+ procBoolExp tn fieldInfoMap (S.QualVar "NEW") chk
+ let deps = mkParentDep tn : beDeps
+ depHeaders = getDependentHeaders chk
+ return $ InsPermInfo vn be (fromMaybe False upsrt) deps depHeaders
+ where
+ fieldInfoMap = tiFieldInfoMap tabInfo
+ tn = tiName tabInfo
+ vn = buildViewName tn rn PTInsert
+
+buildInsInfra :: QualifiedTable -> InsPermInfo -> Q.TxE QErr ()
+buildInsInfra tn (InsPermInfo vn be _ _ _) =
+ Q.catchE defaultTxErrorHandler $ do
+ -- Create the view
+ Q.unitQ (buildView tn vn) () False
+ -- Inject defaults on the view
+ Q.discardQ (injectDefaults vn tn) () False
+ -- Construct a trigger function
+ Q.unitQ (buildInsTrigFn vn tn be) () False
+ -- Add trigger for check expression
+ Q.unitQ (buildInsTrig vn) () False
+
+clearInsInfra :: QualifiedTable -> Q.TxE QErr ()
+clearInsInfra vn =
+ Q.catchE defaultTxErrorHandler $ do
+ dropView vn
+ Q.unitQ (dropInsTrigFn vn) () False
+
+type DropInsPerm = DropPerm InsPerm
+
+dropInsPermP2 :: (P2C m) => DropInsPerm -> QualifiedTable -> m ()
+dropInsPermP2 = dropPermP2
+
+type instance PermInfo InsPerm = InsPermInfo
+
+instance IsPerm InsPerm where
+
+ type DropPermP1Res InsPerm = QualifiedTable
+
+ permAccessor = PAInsert
+
+ buildPermInfo = buildInsPermInfo
+
+ addPermP2Setup qt _ permInfo =
+ liftTx $ buildInsInfra qt permInfo
+
+ buildDropPermP1Res dp =
+ ipiView <$> dropPermP1 dp
+
+ dropPermP2Setup _ vn =
+ liftTx $ clearInsInfra vn
+
+-- Select permission
+data SelPerm
+ = SelPerm
+ { spColumns :: !PermColSpec -- Allowed columns
+ , spFilter :: !BoolExp -- Filter expression
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)
+
+buildSelPermInfo
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> SelPerm
+ -> m SelPermInfo
+buildSelPermInfo tabInfo sp = do
+ let pgCols = convColSpec fieldInfoMap $ spColumns sp
+
+ (be, beDeps) <- withPathK "filter" $
+ procBoolExp tn fieldInfoMap (S.mkQual tn) $ spFilter sp
+
+ -- check if the columns exist
+ void $ withPathK "columns" $ indexedForM pgCols $ \pgCol ->
+ askPGType fieldInfoMap pgCol autoInferredErr
+
+ let deps = mkParentDep tn : beDeps ++ map (mkColDep "untyped" tn) pgCols
+ depHeaders = getDependentHeaders $ spFilter sp
+
+ return $ SelPermInfo (HS.fromList pgCols) tn be deps depHeaders
+
+ where
+ tn = tiName tabInfo
+ fieldInfoMap = tiFieldInfoMap tabInfo
+ autoInferredErr = "permissions for relationships are automatically inferred"
+
+type SelPermDef = PermDef SelPerm
+type CreateSelPerm = CreatePerm SelPerm
+type DropSelPerm = DropPerm SelPerm
+
+type instance PermInfo SelPerm = SelPermInfo
+
+dropSelPermP2 :: (P2C m) => DropSelPerm -> m ()
+dropSelPermP2 dp = dropPermP2 dp ()
+
+instance IsPerm SelPerm where
+
+ type DropPermP1Res SelPerm = ()
+
+ permAccessor = PASelect
+
+ buildPermInfo ti (PermDef _ a _) =
+ buildSelPermInfo ti a
+
+ buildDropPermP1Res =
+ void . dropPermP1
+
+ addPermP2Setup _ _ _ = return ()
+
+ dropPermP2Setup _ _ = return ()
+
+-- Update permission
+data UpdPerm
+ = UpdPerm
+ { ucColumns :: !PermColSpec -- Allowed columns
+ , ucFilter :: !BoolExp -- Filter expression
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UpdPerm)
+
+type UpdPermDef = PermDef UpdPerm
+type CreateUpdPerm = CreatePerm UpdPerm
+
+buildUpdPermInfo
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> UpdPerm
+ -> m UpdPermInfo
+buildUpdPermInfo tabInfo (UpdPerm colSpec fltr) = do
+ (be, beDeps) <- withPathK "filter" $
+ procBoolExp tn fieldInfoMap (S.mkQual tn) fltr
+
+ -- check if the columns exist
+ _ <- withPathK "columns" $ indexedForM updCols $ \updCol ->
+ askPGType fieldInfoMap updCol relInUpdErr
+
+ let deps = mkParentDep tn : beDeps ++ map (mkColDep "untyped" tn) updCols
+ depHeaders = getDependentHeaders fltr
+
+ return $ UpdPermInfo (HS.fromList updCols) tn be deps depHeaders
+
+ where
+ tn = tiName tabInfo
+ fieldInfoMap = tiFieldInfoMap tabInfo
+ updCols = convColSpec fieldInfoMap colSpec
+ relInUpdErr = "relationships can't be used in update"
+
+type instance PermInfo UpdPerm = UpdPermInfo
+
+type DropUpdPerm = DropPerm UpdPerm
+
+dropUpdPermP2 :: (P2C m) => DropUpdPerm -> m ()
+dropUpdPermP2 dp = dropPermP2 dp ()
+
+instance IsPerm UpdPerm where
+
+ type DropPermP1Res UpdPerm = ()
+
+ permAccessor = PAUpdate
+
+ buildPermInfo ti (PermDef _ a _) =
+ buildUpdPermInfo ti a
+
+ addPermP2Setup _ _ _ = return ()
+
+ buildDropPermP1Res =
+ void . dropPermP1
+
+ dropPermP2Setup _ _ = return ()
+
+-- Delete permission
+data DelPerm
+ = DelPerm { dcFilter :: !BoolExp }
+ deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DelPerm)
+
+type DelPermDef = PermDef DelPerm
+type CreateDelPerm = CreatePerm DelPerm
+
+buildDelPermInfo
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> DelPerm
+ -> m DelPermInfo
+buildDelPermInfo tabInfo (DelPerm fltr) = do
+ (be, beDeps) <- withPathK "filter" $
+ procBoolExp tn fieldInfoMap (S.mkQual tn) fltr
+ let deps = mkParentDep tn : beDeps
+ depHeaders = getDependentHeaders fltr
+ return $ DelPermInfo tn be deps depHeaders
+ where
+ tn = tiName tabInfo
+ fieldInfoMap = tiFieldInfoMap tabInfo
+
+type DropDelPerm = DropPerm DelPerm
+
+dropDelPermP2 :: (P2C m) => DropDelPerm -> m ()
+dropDelPermP2 dp = dropPermP2 dp ()
+
+type instance PermInfo DelPerm = DelPermInfo
+
+instance IsPerm DelPerm where
+
+ type DropPermP1Res DelPerm = ()
+
+ permAccessor = PADelete
+
+ buildPermInfo ti (PermDef _ a _) =
+ buildDelPermInfo ti a
+
+ addPermP2Setup _ _ _ = return ()
+
+ buildDropPermP1Res =
+ void . dropPermP1
+
+ dropPermP2Setup _ _ = return ()
+
+data SetPermComment
+ = SetPermComment
+ { apTable :: !QualifiedTable
+ , apRole :: !RoleName
+ , apPermission :: !PermType
+ , apComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase) ''SetPermComment)
+
+setPermCommentP1 :: (P1C m) => SetPermComment -> m ()
+setPermCommentP1 (SetPermComment qt rn pt _) = do
+ adminOnly
+ tabInfo <- askTabInfo qt
+ action tabInfo
+ where
+ action tabInfo = case pt of
+ PTInsert -> assertPermDefined rn PAInsert tabInfo
+ PTSelect -> assertPermDefined rn PASelect tabInfo
+ PTUpdate -> assertPermDefined rn PAUpdate tabInfo
+ PTDelete -> assertPermDefined rn PADelete tabInfo
+
+setPermCommentP2 :: (P2C m) => SetPermComment -> m RespBody
+setPermCommentP2 apc = do
+ liftTx $ setPermCommentTx apc
+ return successMsg
+
+instance HDBQuery SetPermComment where
+
+ type Phase1Res SetPermComment = ()
+ phaseOne = setPermCommentP1
+
+ phaseTwo q _ = setPermCommentP2 q
+
+ schemaCachePolicy = SCPNoChange
+
+setPermCommentTx
+ :: SetPermComment
+ -> Q.TxE QErr ()
+setPermCommentTx (SetPermComment (QualifiedTable sn tn) rn pt comment) =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ UPDATE hdb_catalog.hdb_permission
+ SET comment = $1
+ WHERE table_schema = $2
+ AND table_name = $3
+ AND role_name = $4
+ AND perm_type = $5
+ |] (comment, sn, tn, rn, permTypeToCode pt) True
+
+purgePerm :: (P2C m) => QualifiedTable -> RoleName -> PermType -> m ()
+purgePerm qt rn pt =
+ case pt of
+ PTInsert -> dropInsPermP2 dp $ buildViewName qt rn PTInsert
+ PTSelect -> dropSelPermP2 dp
+ PTUpdate -> dropUpdPermP2 dp
+ PTDelete -> dropDelPermP2 dp
+ where
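+    -- 'dp' stays polymorphic in the permission type so the same record can
+    -- be used at each instance type in the branches above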
+ dp :: DropPerm a
+ dp = DropPerm qt rn
diff --git a/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs
new file mode 100644
index 00000000..605dbb40
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Permission/Internal.hs
@@ -0,0 +1,337 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeApplications #-}
+{-# LANGUAGE TypeFamilies #-}
+{-# LANGUAGE TypeFamilyDependencies #-}
+
+module Hasura.RQL.DDL.Permission.Internal where
+
+import Control.Lens hiding ((.=))
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Data.Aeson.Types
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Extended as T
+
+import Hasura.Prelude
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.Server.Utils
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+data PermColSpec
+ = PCStar
+ | PCCols ![PGCol]
+ deriving (Show, Eq, Lift)
+
+instance FromJSON PermColSpec where
+ parseJSON (String "*") = return PCStar
+ parseJSON x = PCCols <$> parseJSON x
+
+instance ToJSON PermColSpec where
+ toJSON (PCCols cols) = toJSON cols
+ toJSON PCStar = "*"
+
+convColSpec :: FieldInfoMap -> PermColSpec -> [PGCol]
+convColSpec _ (PCCols cols) = cols
+convColSpec cim PCStar =
+ map pgiName $ fst $ partitionEithers $
+ map fieldInfoToEither $ M.elems cim
+
+assertPermNotDefined
+ :: (MonadError QErr m)
+ => RoleName
+ -> PermAccessor a
+ -> TableInfo
+ -> m ()
+assertPermNotDefined roleName pa tableInfo =
+ when (permissionIsDefined rpi pa) $ throw400 AlreadyExists $ mconcat
+ [ "'" <> T.pack (show $ permAccToType pa) <> "'"
+ , " permission on " <>> tiName tableInfo
+ , " for role " <>> roleName
+ , " already exists"
+ ]
+ where
+ rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
+
+permissionIsDefined
+ :: Maybe RolePermInfo -> PermAccessor a -> Bool
+permissionIsDefined rpi pa =
+  isJust $ join $ rpi ^? _Just . permAccToLens pa
+
+assertPermDefined
+ :: (MonadError QErr m)
+ => RoleName
+ -> PermAccessor a
+ -> TableInfo
+ -> m ()
+assertPermDefined roleName pa tableInfo =
+ unless (permissionIsDefined rpi pa) $ throw400 PermissionDenied $ mconcat
+ [ "'" <> T.pack (show $ permAccToType pa) <> "'"
+ , " permission on " <>> tiName tableInfo
+ , " for role " <>> roleName
+ , " does not exist"
+ ]
+ where
+ rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
+
+askPermInfo
+ :: (MonadError QErr m)
+ => TableInfo
+ -> RoleName
+ -> PermAccessor c
+ -> m c
+askPermInfo tabInfo roleName pa =
+ case M.lookup roleName rpim >>= (^. paL) of
+ Just c -> return c
+ Nothing -> throw400 PermissionDenied $ mconcat
+ [ pt <> " permisison on " <>> tiName tabInfo
+ , " for role " <>> roleName
+ , " does not exist"
+ ]
+ where
+ paL = permAccToLens pa
+ pt = permTypeToCode $ permAccToType pa
+ rpim = tiRolePermInfoMap tabInfo
+
+savePermToCatalog
+ :: (ToJSON a)
+ => PermType
+ -> QualifiedTable
+ -> PermDef a
+ -> Q.TxE QErr ()
+savePermToCatalog pt (QualifiedTable sn tn) (PermDef rn qdef mComment) =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ INSERT INTO
+ hdb_catalog.hdb_permission
+ (table_schema, table_name, role_name, perm_type, perm_def, comment)
+ VALUES ($1, $2, $3, $4, $5 :: jsonb, $6)
+ |] (sn, tn, rn, permTypeToCode pt, Q.AltJ qdef, mComment) True
+
+dropPermFromCatalog
+ :: QualifiedTable
+ -> RoleName
+ -> PermType
+ -> Q.TxE QErr ()
+dropPermFromCatalog (QualifiedTable sn tn) rn pt =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ DELETE FROM
+ hdb_catalog.hdb_permission
+ WHERE
+ table_schema = $1
+ AND table_name = $2
+ AND role_name = $3
+ AND perm_type = $4
+ |] (sn, tn, rn, permTypeToCode pt) True
+
+type CreatePerm a = WithTable (PermDef a)
+
+data PermDef a =
+ PermDef
+ { pdRole :: !RoleName
+ , pdPermission :: !a
+ , pdComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveFromJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''PermDef)
+
+instance (ToJSON a) => ToJSON (PermDef a) where
+ toJSON = object . toAesonPairs
+
+instance (ToJSON a) => ToAesonPairs (PermDef a) where
+ toAesonPairs (PermDef rn perm comment) =
+ [ "role" .= rn
+ , "permission" .= perm
+ , "comment" .= comment
+ ]
+
+data CreatePermP1Res a
+ = CreatePermP1Res
+ { cprInfo :: !a
+ , cprDeps :: ![SchemaDependency]
+ } deriving (Show, Eq)
+
+createPermP1 :: (P1C m) => QualifiedTable -> m TableInfo
+createPermP1 tn = do
+ adminOnly
+ askTabInfo tn
+
+procBoolExp
+ :: (QErrM m, CacheRM m)
+ => QualifiedTable -> FieldInfoMap -> S.Qual -> BoolExp
+ -> m (S.BoolExp, [SchemaDependency])
+procBoolExp tn fieldInfoMap tq be = do
+ abe <- annBoolExp valueParser fieldInfoMap be
+ sqlbe <- convFilterExp tq abe
+ let deps = getBoolExpDeps tn abe
+ return (sqlbe, deps)
+
+getDependentHeaders :: BoolExp -> [T.Text]
+getDependentHeaders boolExp = case boolExp of
+ BoolAnd exps -> concatMap getDependentHeaders exps
+ BoolOr exps -> concatMap getDependentHeaders exps
+ BoolCol (ColExp _ v) -> parseValue v
+ BoolNot be -> getDependentHeaders be
+ where
+ parseValue val = case val of
+ (Object o) -> parseObject o
+ _ -> parseOnlyString val
+
+ parseOnlyString val = case val of
+ (String t) -> if isXHasuraTxt t
+ then [T.toLower t]
+ else []
+ _ -> []
+ parseObject o = flip concatMap (M.toList o) $ \(k, v) ->
+ if isRQLOp k
+ then parseOnlyString v
+ else []
+
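+-- For example (hypothetical filter), a boolean expression such as
+--   { "author_id": { "$eq": "X-Hasura-User-Id" } }
+-- yields ["x-hasura-user-id"], assuming the "$eq" key is recognised by
+-- 'isRQLOp'.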
+
+valueParser :: (MonadError QErr m) => PGColType -> Value -> m S.SQLExp
+valueParser columnType val = case (val, columnType) of
+ -- When it is a special variable
+ (String t, ty) ->
+ if isXHasuraTxt t
+ then return $ S.SEUnsafe $
+ "current_setting('hasura." <> dropAndSnakeCase t
+ <> "')::" <> T.pack (show ty)
+ else txtRHSBuilder ty val
+ -- Typical value as Aeson's value
+ _ -> txtRHSBuilder columnType val
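+
+-- For a (hypothetical) session value such as String "X-Hasura-User-Id"
+-- against a text column, the result is a
+-- current_setting('hasura.<snake-cased header>')::<type> expression rather
+-- than a literal; the exact name normalisation is whatever
+-- 'dropAndSnakeCase' does.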
+
+-- Convert where clause into SQL BoolExp
+convFilterExp :: (MonadError QErr m)
+ => S.Qual -> GBoolExp AnnValS -> m S.BoolExp
+convFilterExp tq be =
+ cBoolExp <$> convBoolRhs builderStrategy tq be
+ where
+ builderStrategy = mkBoolExpBuilder return
+
+injectDefaults :: QualifiedTable -> QualifiedTable -> Q.Query
+injectDefaults qv qt =
+ Q.fromBuilder $ mconcat
+ [ BB.string7 "SELECT hdb_catalog.inject_table_defaults("
+ , TE.encodeUtf8Builder $ pgFmtLit vsn
+ , BB.string7 ", "
+ , TE.encodeUtf8Builder $ pgFmtLit vn
+ , BB.string7 ", "
+ , TE.encodeUtf8Builder $ pgFmtLit tsn
+ , BB.string7 ", "
+ , TE.encodeUtf8Builder $ pgFmtLit tn
+ , BB.string7 ");"
+ ]
+ where
+ QualifiedTable (SchemaName vsn) (TableName vn) = qv
+ QualifiedTable (SchemaName tsn) (TableName tn) = qt
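+
+-- A (hypothetical) rendering:
+--
+--   SELECT hdb_catalog.inject_table_defaults('hdb_views',
+--     'user__insert__public__author', 'public', 'author');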
+
+data DropPerm a
+ = DropPerm
+ { dipTable :: !QualifiedTable
+ , dipRole :: !RoleName
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''DropPerm)
+
+type family PermInfo a = r | r -> a
+
+class (ToJSON a) => IsPerm a where
+
+ type DropPermP1Res a
+
+ permAccessor
+ :: PermAccessor (PermInfo a)
+
+ buildPermInfo
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> PermDef a
+ -> m (PermInfo a)
+
+ addPermP2Setup
+ :: (CacheRWM m, MonadTx m) => QualifiedTable -> PermDef a -> PermInfo a -> m ()
+
+ buildDropPermP1Res
+ :: (QErrM m, CacheRM m, UserInfoM m)
+ => DropPerm a
+ -> m (DropPermP1Res a)
+
+ dropPermP2Setup
+ :: (CacheRWM m, MonadTx m) => DropPerm a -> DropPermP1Res a -> m ()
+
+ getPermAcc1
+ :: PermDef a -> PermAccessor (PermInfo a)
+ getPermAcc1 _ = permAccessor
+
+ getPermAcc2
+ :: DropPerm a -> PermAccessor (PermInfo a)
+ getPermAcc2 _ = permAccessor
+
+addPermP1 :: (QErrM m, CacheRM m, IsPerm a) => TableInfo -> PermDef a -> m (PermInfo a)
+addPermP1 tabInfo pd = do
+ assertPermNotDefined (pdRole pd) (getPermAcc1 pd) tabInfo
+ buildPermInfo tabInfo pd
+
+addPermP2 :: (IsPerm a, QErrM m, CacheRWM m, MonadTx m)
+ => QualifiedTable -> PermDef a -> PermInfo a -> m ()
+addPermP2 tn pd permInfo = do
+ addPermP2Setup tn pd permInfo
+ addPermToCache tn (pdRole pd) pa permInfo
+ liftTx $ savePermToCatalog pt tn pd
+ where
+ pa = getPermAcc1 pd
+ pt = permAccToType pa
+
+instance (IsPerm a) => HDBQuery (CreatePerm a) where
+
+ type Phase1Res (CreatePerm a) = PermInfo a
+
+ phaseOne (WithTable tn pd) = do
+ tabInfo <- createPermP1 tn
+ addPermP1 tabInfo pd
+
+ phaseTwo (WithTable tn pd) permInfo = do
+ addPermP2 tn pd permInfo
+ return successMsg
+
+ schemaCachePolicy = SCPReload
+
+dropPermP1 :: (QErrM m, CacheRM m, UserInfoM m, IsPerm a) => DropPerm a -> m (PermInfo a)
+dropPermP1 dp@(DropPerm tn rn) = do
+ adminOnly
+ tabInfo <- askTabInfo tn
+ askPermInfo tabInfo rn $ getPermAcc2 dp
+
+dropPermP2
+ :: (IsPerm a, QErrM m, CacheRWM m, MonadTx m)
+ => DropPerm a -> DropPermP1Res a -> m ()
+dropPermP2 dp@(DropPerm tn rn) p1Res = do
+ dropPermP2Setup dp p1Res
+ delPermFromCache pa rn tn
+ liftTx $ dropPermFromCatalog tn rn pt
+ where
+ pa = getPermAcc2 dp
+ pt = permAccToType pa
+
+instance (IsPerm a) => HDBQuery (DropPerm a) where
+
+ type Phase1Res (DropPerm a) = DropPermP1Res a
+
+ phaseOne = buildDropPermP1Res
+
+ phaseTwo dp p1Res = dropPermP2 dp p1Res >> return successMsg
+
+ schemaCachePolicy = SCPReload
diff --git a/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs b/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs
new file mode 100644
index 00000000..9ab8e758
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/QueryTemplate.hs
@@ -0,0 +1,219 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DDL.QueryTemplate
+ ( createQueryTemplateP1
+ , createQueryTemplateP2
+ , delQTemplateFromCatalog
+ , TemplateParamConf(..)
+ , CreateQueryTemplate(..)
+ , DropQueryTemplate(..)
+ , QueryTP1
+ , SetQueryTemplateComment(..)
+ ) where
+
+import Hasura.RQL.GBoolExp (txtRHSBuilder)
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.RQL.DML.Count as R
+import qualified Hasura.RQL.DML.Delete as R
+import qualified Hasura.RQL.DML.Insert as R
+import qualified Hasura.RQL.DML.Select as R
+import qualified Hasura.RQL.DML.Update as R
+import qualified Hasura.SQL.DML as PS
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+
+data TemplateParamConf
+ = TemplateParamConf
+ { tpcParam :: !TemplateParam
+ , tpcDefault :: !(Maybe Value)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''TemplateParamConf)
+
+data CreateQueryTemplate
+ = CreateQueryTemplate
+ { cqtName :: !TQueryName
+ , cqtTemplate :: !QueryT
+ , cqtComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''CreateQueryTemplate)
+
+validateParam
+ :: PGColType
+ -> Value
+ -> P1 PS.SQLExp
+validateParam pct val =
+ case val of
+ Object _ -> do
+ tpc <- decodeValue val
+ withPathK "default" $
+ maybe (return ()) validateDefault $ tpcDefault tpc
+ return $ PS.SELit "NULL"
+ _ -> txtRHSBuilder pct val
+ where
+ validateDefault =
+ void . runAesonParser (convToBin pct)
+
+data QueryTP1
+ = QTP1Insert R.InsertQueryP1
+ | QTP1Select R.SelectData
+ | QTP1Update R.UpdateQueryP1
+ | QTP1Delete R.DeleteQueryP1
+ | QTP1Count R.CountQueryP1
+ | QTP1Bulk [QueryTP1]
+ deriving (Show, Eq)
+
+validateTQuery
+ :: QueryT
+ -> P1 QueryTP1
+validateTQuery qt = withPathK "args" $ case qt of
+ QTInsert q -> QTP1Insert <$> R.convInsertQuery decodeInsObjs validateParam q
+ QTSelect q -> QTP1Select <$> R.convSelectQuery validateParam q
+ QTUpdate q -> QTP1Update <$> R.convUpdateQuery validateParam q
+ QTDelete q -> QTP1Delete <$> R.convDeleteQuery validateParam q
+ QTCount q -> QTP1Count <$> R.countP1 validateParam q
+ QTBulk q -> QTP1Bulk <$> mapM validateTQuery q
+ where
+ decodeInsObjs val = do
+ tpc <- decodeValue val
+ mDefObjs <- mapM decodeValue $ tpcDefault tpc
+ return $ fromMaybe [] mDefObjs
+
+collectDeps
+ :: QueryTP1 -> [SchemaDependency]
+collectDeps qt = case qt of
+ QTP1Insert qp1 -> R.getInsertDeps qp1
+ QTP1Select qp1 -> R.getSelectDeps qp1
+ QTP1Update qp1 -> R.getUpdateDeps qp1
+ QTP1Delete qp1 -> R.getDeleteDeps qp1
+ QTP1Count qp1 -> R.getCountDeps qp1
+ QTP1Bulk qp1 -> concatMap collectDeps qp1
+
+createQueryTemplateP1
+ :: (P1C m) => CreateQueryTemplate -> m QueryTemplateInfo
+createQueryTemplateP1 (CreateQueryTemplate qtn qt _) = do
+ adminOnly
+ ui <- askUserInfo
+ sc <- askSchemaCache
+ withPathK "name" $ when (isJust $ M.lookup qtn $ scQTemplates sc) $
+ throw400 AlreadyExists $ "the query template already exists : " <>> qtn
+ let qCtx = QCtx ui sc
+ qtp1 <- withPathK "template" $ liftP1 qCtx $ validateTQuery qt
+ let deps = collectDeps qtp1
+ return $ QueryTemplateInfo qtn qt deps
+
+addQTemplateToCatalog
+ :: CreateQueryTemplate
+ -> Q.TxE QErr ()
+addQTemplateToCatalog (CreateQueryTemplate qtName qtDef mComment) =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ INSERT INTO
+ hdb_catalog.hdb_query_template
+ (template_name, template_defn, comment)
+ VALUES ($1, $2 :: jsonb, $3)
+ |] (qtName, Q.AltJ qtDef, mComment) False
+
+createQueryTemplateP2
+ :: (P2C m)
+ => CreateQueryTemplate -> QueryTemplateInfo -> m RespBody
+createQueryTemplateP2 cqt qti = do
+ addQTemplateToCache qti
+ liftTx $ addQTemplateToCatalog cqt
+ return successMsg
+
+instance HDBQuery CreateQueryTemplate where
+
+ type Phase1Res CreateQueryTemplate = QueryTemplateInfo
+ phaseOne = createQueryTemplateP1
+
+ phaseTwo = createQueryTemplateP2
+
+ schemaCachePolicy = SCPReload
+
+data DropQueryTemplate
+ = DropQueryTemplate
+ { dqtName :: !TQueryName
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''DropQueryTemplate)
+
+delQTemplateFromCatalog
+ :: TQueryName
+ -> Q.TxE QErr ()
+delQTemplateFromCatalog qtn =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ DELETE FROM
+ hdb_catalog.hdb_query_template
+ WHERE template_name = $1
+ |] (Identity qtn) False
+
+instance HDBQuery DropQueryTemplate where
+
+ type Phase1Res DropQueryTemplate = ()
+ phaseOne (DropQueryTemplate qtn) =
+ withPathK "name" $ void $ askQTemplateInfo qtn
+
+ phaseTwo (DropQueryTemplate qtn) _ = do
+ delQTemplateFromCache qtn
+ liftTx $ delQTemplateFromCatalog qtn
+ return successMsg
+
+ schemaCachePolicy = SCPReload
+
+data SetQueryTemplateComment
+ = SetQueryTemplateComment
+ { sqtcName :: !TQueryName
+ , sqtcComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 4 snakeCase) ''SetQueryTemplateComment)
+
+setQueryTemplateCommentP1 :: (P1C m) => SetQueryTemplateComment -> m ()
+setQueryTemplateCommentP1 (SetQueryTemplateComment qtn _) = do
+ adminOnly
+ void $ askQTemplateInfo qtn
+
+setQueryTemplateCommentP2 :: (P2C m) => SetQueryTemplateComment -> m RespBody
+setQueryTemplateCommentP2 apc = do
+ liftTx $ setQueryTemplateCommentTx apc
+ return successMsg
+
+instance HDBQuery SetQueryTemplateComment where
+
+ type Phase1Res SetQueryTemplateComment = ()
+ phaseOne = setQueryTemplateCommentP1
+
+ phaseTwo q _ = setQueryTemplateCommentP2 q
+
+ schemaCachePolicy = SCPNoChange
+
+setQueryTemplateCommentTx
+ :: SetQueryTemplateComment
+ -> Q.TxE QErr ()
+setQueryTemplateCommentTx (SetQueryTemplateComment qtn comment) =
+ Q.unitQE defaultTxErrorHandler
+ [Q.sql|
+ UPDATE hdb_catalog.hdb_query_template
+ SET comment = $1
+ WHERE template_name = $2
+ |] (comment, qtn) False
diff --git a/server/src-lib/Hasura/RQL/DDL/Relationship.hs b/server/src-lib/Hasura/RQL/DDL/Relationship.hs
new file mode 100644
index 00000000..6de42f66
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Relationship.hs
@@ -0,0 +1,419 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DDL.Relationship where
+
+import qualified Database.PG.Query as Q
+import Hasura.RQL.DDL.Deps
+import Hasura.RQL.DDL.Permission (purgePerm)
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Data.Aeson.Types
+import qualified Data.HashMap.Strict as HM
+import qualified Data.Map.Strict as M
+import qualified Data.Text as T
+import Data.Tuple (swap)
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+data RelDef a
+ = RelDef
+ { rdName :: !RelName
+ , rdUsing :: !a
+ , rdComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveFromJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RelDef)
+
+instance (ToJSON a) => ToJSON (RelDef a) where
+ toJSON = object . toAesonPairs
+
+instance (ToJSON a) => ToAesonPairs (RelDef a) where
+ toAesonPairs (RelDef rn ru rc) =
+ [ "name" .= rn
+ , "using" .= ru
+ , "comment" .= rc
+ ]
+
+data RelManualConfig
+ = RelManualConfig
+ { rmTable :: !QualifiedTable
+ , rmColumns :: !(M.Map PGCol PGCol)
+ } deriving (Show, Eq, Lift)
+
+instance FromJSON RelManualConfig where
+ parseJSON (Object v) =
+ RelManualConfig
+ <$> v .: "remote_table"
+ <*> v .: "column_mapping"
+
+ parseJSON _ =
+ fail "manual_configuration should be an object"
+
+instance ToJSON RelManualConfig where
+ toJSON (RelManualConfig qt cm) =
+ object [ "remote_table" .= qt
+ , "column_mapping" .= cm
+ ]
+
+data RelUsing a b
+ = RUFKeyOn a
+ | RUManual b
+ deriving (Show, Eq, Lift)
+
+instance (ToJSON a, ToJSON b) => ToJSON (RelUsing a b) where
+ toJSON (RUFKeyOn fkey) =
+ object [ "foreign_key_constraint_on" .= fkey ]
+ toJSON (RUManual manual) =
+ object [ "manual_configuration" .= manual ]
+
+instance (FromJSON a, FromJSON b) => FromJSON (RelUsing a b) where
+ parseJSON (Object o) = do
+ let fkeyOnM = HM.lookup "foreign_key_constraint_on" o
+ manualM = HM.lookup "manual_configuration" o
+ let msgFrag = "one of foreign_key_constraint_on/manual_configuration should be present"
+ case (fkeyOnM, manualM) of
+ (Nothing, Nothing) -> fail $ "atleast " <> msgFrag
+ (Just a, Nothing) -> RUFKeyOn <$> parseJSON a
+ (Nothing, Just b) -> RUManual <$> parseJSON b
+ _ -> fail $ "only " <> msgFrag
+ parseJSON _ =
+ fail "using should be an object"
+
+newtype ObjRelManualConfig =
+ ObjRelManualConfig { getObjRelMapping :: RelManualConfig }
+ deriving (Show, Eq, FromJSON, ToJSON, Lift)
+
+validateManualConfig
+ :: (QErrM m, CacheRM m)
+ => FieldInfoMap
+ -> RelManualConfig
+ -> m ()
+validateManualConfig fim rm = do
+ let colMapping = M.toList $ rmColumns rm
+ remoteQt = rmTable rm
+ remoteTabInfo <- askTabInfo remoteQt
+ let remoteFim = tiFieldInfoMap remoteTabInfo
+ forM_ colMapping $ \(lCol, rCol) -> do
+ assertPGCol fim "" lCol
+ assertPGCol remoteFim "" rCol
+ -- lColType <- askPGType fim lCol ""
+ -- rColType <- askPGType remoteFim rCol ""
+ -- when (lColType /= rColType) $
+ -- throw400 $ mconcat
+ -- [ "the types of columns " <> lCol <<> ", " <>> rCol
+ -- , " do not match"
+ -- ]
+
+persistRel :: QualifiedTable
+ -> RelName
+ -> RelType
+ -> Value
+ -> Maybe T.Text
+ -> Q.TxE QErr ()
+persistRel (QualifiedTable sn tn) rn relType relDef comment =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ INSERT INTO
+ hdb_catalog.hdb_relationship
+ (table_schema, table_name, rel_name, rel_type, rel_def, comment)
+ VALUES ($1, $2, $3, $4, $5 :: jsonb, $6)
+ |] (sn, tn, rn, relTypeToTxt relType, Q.AltJ relDef, comment) True
+
+checkForColConflict
+ :: (MonadError QErr m)
+ => TableInfo
+ -> FieldName
+ -> m ()
+checkForColConflict tabInfo f =
+ case HM.lookup f (tiFieldInfoMap tabInfo) of
+ Just _ -> throw400 AlreadyExists $ mconcat
+ [ "column/relationship " <>> f
+ , " of table " <>> tiName tabInfo
+ , " already exists"
+ ]
+ Nothing -> return ()
+
+type ObjRelUsing = RelUsing PGCol ObjRelManualConfig
+type ObjRelDef = RelDef ObjRelUsing
+
+type CreateObjRel = WithTable ObjRelDef
+
+objRelP1
+ :: (QErrM m, CacheRM m)
+ => TableInfo
+ -> ObjRelDef
+ -> m ()
+objRelP1 tabInfo (RelDef rn ru _) = do
+  checkForColConflict tabInfo (fromRel rn)
+ let fim = tiFieldInfoMap tabInfo
+ case ru of
+ RUFKeyOn cn -> assertPGCol fim "" cn
+ RUManual (ObjRelManualConfig rm) -> validateManualConfig fim rm
+
+createObjRelP1
+ :: (P1C m)
+ => CreateObjRel
+ -> m ()
+createObjRelP1 (WithTable qt rd) = do
+ adminOnly
+ tabInfo <- askTabInfo qt
+ objRelP1 tabInfo rd
+
+objRelP2Setup :: (P2C m) => QualifiedTable -> RelDef ObjRelUsing -> m ()
+objRelP2Setup qt (RelDef rn ru _) = do
+ relInfo <- case ru of
+ RUManual (ObjRelManualConfig rm) -> do
+ let refqt = rmTable rm
+ (lCols, rCols) = unzip $ M.toList $ rmColumns rm
+ deps = map (\c -> SchemaDependency (SOTableObj qt $ TOCol c) "lcol") lCols
+ <> map (\c -> SchemaDependency (SOTableObj refqt $ TOCol c) "rcol") rCols
+ return $ RelInfo rn ObjRel (zip lCols rCols) refqt deps
+ RUFKeyOn cn -> do
+ res <- liftTx $ Q.catchE defaultTxErrorHandler $ fetchFKeyDetail cn
+ case mapMaybe processRes res of
+ [] -> throw400 ConstraintError
+ "no foreign constraint exists on the given column"
+ [(consName, refsn, reftn, colMapping)] -> do
+ let deps = [ SchemaDependency (SOTableObj qt $ TOCons consName) "fkey"
+ , SchemaDependency (SOTableObj qt $ TOCol cn) "using_col"
+ ]
+ refqt = QualifiedTable refsn reftn
+ return $ RelInfo rn ObjRel colMapping refqt deps
+ _ -> throw400 ConstraintError
+ "more than one foreign key constraint exists on the given column"
+ addFldToCache (fromRel rn) (FIRelationship relInfo) qt
+ where
+ QualifiedTable sn tn = qt
+ fetchFKeyDetail cn =
+ Q.listQ [Q.sql|
+ SELECT constraint_name, ref_table_table_schema, ref_table, column_mapping
+ FROM hdb_catalog.hdb_foreign_key_constraint
+ WHERE table_schema = $1
+ AND table_name = $2
+ AND column_mapping ->> $3 IS NOT NULL
+ |] (sn, tn, cn) False
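+  -- keep only single-column foreign keys; a composite mapping cannot be
+  -- expressed through a single 'using' column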
+ processRes (consn, refsn, reftn, mapping) =
+ case M.toList (Q.getAltJ mapping) of
+ m@[_] -> Just (consn, refsn, reftn, m)
+ _ -> Nothing
+
+objRelP2 :: (P2C m) => QualifiedTable -> ObjRelDef -> m ()
+objRelP2 qt rd@(RelDef rn ru comment) = do
+ objRelP2Setup qt rd
+ liftTx $ persistRel qt rn ObjRel (toJSON ru) comment
+
+createObjRelP2 :: (P2C m) => CreateObjRel -> m RespBody
+createObjRelP2 (WithTable qt rd) = do
+ objRelP2 qt rd
+ return successMsg
+
+instance HDBQuery CreateObjRel where
+
+ type Phase1Res CreateObjRel = ()
+ phaseOne = createObjRelP1
+
+ phaseTwo cor _ = createObjRelP2 cor
+
+ schemaCachePolicy = SCPReload
+
+data ArrRelUsingFKeyOn
+ = ArrRelUsingFKeyOn
+ { arufTable :: !QualifiedTable
+ , arufColumn :: !PGCol
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 4 snakeCase){omitNothingFields=True} ''ArrRelUsingFKeyOn)
+
+newtype ArrRelManualConfig =
+ ArrRelManualConfig { getArrRelMapping :: RelManualConfig }
+ deriving (Show, Eq, FromJSON, ToJSON, Lift)
+
+type ArrRelUsing = RelUsing ArrRelUsingFKeyOn ArrRelManualConfig
+type ArrRelDef = RelDef ArrRelUsing
+type CreateArrRel = WithTable ArrRelDef
+
+createArrRelP1 :: (P1C m) => CreateArrRel -> m ()
+createArrRelP1 (WithTable qt rd) = do
+ adminOnly
+ tabInfo <- askTabInfo qt
+ arrRelP1 tabInfo rd
+
+arrRelP1
+ :: (QErrM m, CacheRM m)
+ => TableInfo -> ArrRelDef -> m ()
+arrRelP1 tabInfo (RelDef rn ru _) = do
+  checkForColConflict tabInfo (fromRel rn)
+ let fim = tiFieldInfoMap tabInfo
+ case ru of
+ RUFKeyOn (ArrRelUsingFKeyOn remoteQt rcn) -> do
+ remoteTabInfo <- askTabInfo remoteQt
+ let rfim = tiFieldInfoMap remoteTabInfo
+ -- Check if 'using' column exists
+ assertPGCol rfim "" rcn
+ RUManual (ArrRelManualConfig rm) ->
+ validateManualConfig fim rm
+
+arrRelP2Setup :: (P2C m) => QualifiedTable -> ArrRelDef -> m ()
+arrRelP2Setup qt (RelDef rn ru _) = do
+ relInfo <- case ru of
+ RUManual (ArrRelManualConfig rm) -> do
+ let refqt = rmTable rm
+ (lCols, rCols) = unzip $ M.toList $ rmColumns rm
+ deps = map (\c -> SchemaDependency (SOTableObj qt $ TOCol c) "lcol") lCols
+ <> map (\c -> SchemaDependency (SOTableObj refqt $ TOCol c) "rcol") rCols
+ return $ RelInfo rn ArrRel (zip lCols rCols) refqt deps
+ RUFKeyOn (ArrRelUsingFKeyOn refqt refCol) -> do
+ let QualifiedTable refSn refTn = refqt
+ res <- liftTx $ Q.catchE defaultTxErrorHandler $
+ fetchFKeyDetail refSn refTn refCol
+ case mapMaybe processRes res of
+ [] -> throw400 ConstraintError
+ "no foreign constraint exists on the given column"
+ [(consName, mapping)] -> do
+ let deps = [ SchemaDependency (SOTableObj refqt $ TOCons consName) "remote_fkey"
+ , SchemaDependency (SOTableObj refqt $ TOCol refCol) "using_col"
+ ]
+ return $ RelInfo rn ArrRel (map swap mapping) refqt deps
+ _ -> throw400 ConstraintError
+ "more than one foreign key constraint exists on the given column"
+ addFldToCache (fromRel rn) (FIRelationship relInfo) qt
+ where
+ QualifiedTable sn tn = qt
+ fetchFKeyDetail refsn reftn refcn = Q.listQ [Q.sql|
+ SELECT constraint_name, column_mapping
+ FROM hdb_catalog.hdb_foreign_key_constraint
+ WHERE table_schema = $1
+ AND table_name = $2
+ AND column_mapping -> $3 IS NOT NULL
+ AND ref_table_table_schema = $4
+ AND ref_table = $5
+ |] (refsn, reftn, refcn, sn, tn) False
+ processRes (consn, mapping) =
+ case M.toList (Q.getAltJ mapping) of
+ m@[_] -> Just (consn, m)
+ _ -> Nothing
+
+arrRelP2 :: (P2C m) => QualifiedTable -> ArrRelDef -> m ()
+arrRelP2 qt rd@(RelDef rn u comment) = do
+ arrRelP2Setup qt rd
+ liftTx $ persistRel qt rn ArrRel (toJSON u) comment
+
+createArrRelP2 :: (P2C m) => CreateArrRel -> m RespBody
+createArrRelP2 (WithTable qt rd) = do
+ arrRelP2 qt rd
+ return successMsg
+
+instance HDBQuery CreateArrRel where
+
+ type Phase1Res CreateArrRel = ()
+ phaseOne = createArrRelP1
+
+ phaseTwo car _ = createArrRelP2 car
+
+ schemaCachePolicy = SCPReload
+
+data DropRel
+ = DropRel
+ { drTable :: !QualifiedTable
+ , drRelationship :: !RelName
+ , drCascade :: !(Maybe Bool)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DropRel)
+
+dropRelP1 :: (P1C m) => DropRel -> m [SchemaObjId]
+dropRelP1 (DropRel qt rn cascade) = do
+ adminOnly
+ tabInfo <- askTabInfo qt
+ _ <- askRelType (tiFieldInfoMap tabInfo) rn ""
+ sc <- askSchemaCache
+ let depObjs = getDependentObjs sc relObjId
+ when (depObjs /= [] && not (or cascade)) $ reportDeps depObjs
+ return depObjs
+ where
+ relObjId = SOTableObj qt $ TORel rn
+
+purgeRelDep :: (P2C m) => SchemaObjId -> m ()
+purgeRelDep (SOTableObj tn (TOPerm rn pt)) =
+ purgePerm tn rn pt
+purgeRelDep d = throw500 $ "unexpected dependency of relationship : "
+ <> reportSchemaObj d
+
+dropRelP2 :: (P2C m) => DropRel -> [SchemaObjId] -> m RespBody
+dropRelP2 (DropRel qt rn _) depObjs = do
+ mapM_ purgeRelDep depObjs
+ delFldFromCache (fromRel rn) qt
+ liftTx $ delRelFromCatalog qt rn
+ return successMsg
+
+instance HDBQuery DropRel where
+
+ type Phase1Res DropRel = [SchemaObjId]
+ phaseOne = dropRelP1
+
+ phaseTwo = dropRelP2
+
+ schemaCachePolicy = SCPReload
+
+delRelFromCatalog :: QualifiedTable
+ -> RelName
+ -> Q.TxE QErr ()
+delRelFromCatalog (QualifiedTable sn tn) rn =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ DELETE FROM
+ hdb_catalog.hdb_relationship
+ WHERE table_schema = $1
+ AND table_name = $2
+ AND rel_name = $3
+ |] (sn, tn, rn) True
+
+data SetRelComment
+ = SetRelComment
+ { arTable :: !QualifiedTable
+ , arRelationship :: !RelName
+ , arComment :: !(Maybe T.Text)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SetRelComment)
+
+setRelCommentP1 :: (P1C m) => SetRelComment -> m ()
+setRelCommentP1 (SetRelComment qt rn _) = do
+ adminOnly
+ tabInfo <- askTabInfo qt
+ void $ askRelType (tiFieldInfoMap tabInfo) rn ""
+
+setRelCommentP2 :: (P2C m) => SetRelComment -> m RespBody
+setRelCommentP2 arc = do
+ liftTx $ setRelComment arc
+ return successMsg
+
+instance HDBQuery SetRelComment where
+
+ type Phase1Res SetRelComment = ()
+ phaseOne = setRelCommentP1
+
+ phaseTwo q _ = setRelCommentP2 q
+
+ schemaCachePolicy = SCPNoChange
+
+setRelComment :: SetRelComment
+ -> Q.TxE QErr ()
+setRelComment (SetRelComment (QualifiedTable sn tn) rn comment) =
+ Q.unitQE defaultTxErrorHandler [Q.sql|
+ UPDATE hdb_catalog.hdb_relationship
+ SET comment = $1
+ WHERE table_schema = $2
+ AND table_name = $3
+ AND rel_name = $4
+ |] (comment, sn, tn, rn) True
diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs
new file mode 100644
index 00000000..64c5d3e0
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Schema/Diff.hs
@@ -0,0 +1,204 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.RQL.DDL.Schema.Diff
+ ( TableMeta(..)
+ , PGColMeta(..)
+ , ConstraintMeta(..)
+ , fetchTableMeta
+
+ , TableDiff(..)
+ , getTableDiff
+ , getTableChangeDeps
+
+ , SchemaDiff(..)
+ , getSchemaDiff
+ , getSchemaChangeDeps
+ ) where
+
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+
+import Data.Aeson.Casing
+import Data.Aeson.TH
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.HashSet as HS
+
+data PGColMeta
+ = PGColMeta
+ { pcmColumnName :: !PGCol
+ , pcmOrdinalPosition :: !Int
+ , pcmDataType :: !PGColType
+ } deriving (Show, Eq)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta)
+
+data ConstraintMeta
+ = ConstraintMeta
+ { cmConstraintName :: !ConstraintName
+ , cmConstraintOid :: !Int
+ } deriving (Show, Eq)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ConstraintMeta)
+
+data TableMeta
+ = TableMeta
+ { tmOid :: !Int
+ , tmTable :: !QualifiedTable
+ , tmColumns :: ![PGColMeta]
+ , tmConstraints :: ![ConstraintMeta]
+ } deriving (Show, Eq)
+
+fetchTableMeta :: Q.Tx [TableMeta]
+fetchTableMeta = do
+ res <- Q.listQ [Q.sql|
+ SELECT
+ t.table_schema,
+ t.table_name,
+ t.table_oid,
+ c.columns,
+ coalesce(f.constraints, '[]') as constraints
+ FROM
+ (SELECT
+ c.oid as table_oid,
+ c.relname as table_name,
+ n.nspname as table_schema
+ FROM
+ pg_catalog.pg_class c
+ JOIN
+ pg_catalog.pg_namespace as n
+ ON
+ c.relnamespace = n.oid
+ ) t
+ INNER JOIN
+ (SELECT
+ table_schema,
+ table_name,
+ json_agg((SELECT r FROM (SELECT column_name, udt_name AS data_type, ordinal_position) r)) as columns
+ FROM
+ information_schema.columns
+ GROUP BY
+ table_schema, table_name) c
+ ON (t.table_schema = c.table_schema AND t.table_name = c.table_name)
+ LEFT OUTER JOIN
+ (SELECT
+ table_schema,
+ table_name,
+ json_agg((SELECT r FROM (SELECT constraint_name, constraint_oid) r)) as constraints
+ FROM
+ hdb_catalog.hdb_foreign_key_constraint
+ GROUP BY
+ table_schema, table_name) f
+ ON (t.table_schema = f.table_schema AND t.table_name = f.table_name)
+ WHERE
+ t.table_schema NOT LIKE 'pg_%'
+ AND t.table_schema <> 'information_schema'
+ AND t.table_schema <> 'hdb_catalog'
+ |] () False
+ forM res $ \(ts, tn, toid, cols, constrnts) ->
+ return $ TableMeta toid (QualifiedTable ts tn) (Q.getAltJ cols) (Q.getAltJ constrnts)
+
+getOverlap :: (Eq k, Hashable k) => (v -> k) -> [v] -> [v] -> [(v, v)]
+getOverlap getKey left right =
+ M.elems $ M.intersectionWith (,) (mkMap left) (mkMap right)
+ where
+ mkMap = M.fromList . map (\v -> (getKey v, v))
+
+getDifference :: (Eq k, Hashable k) => (v -> k) -> [v] -> [v] -> [v]
+getDifference getKey left right =
+ M.elems $ M.difference (mkMap left) (mkMap right)
+ where
+ mkMap = M.fromList . map (\v -> (getKey v, v))
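+
+-- Illustrative only (not part of the original change): keying pairs by
+-- their first component,
+--   getOverlap fst [(1,'a'),(2,'b')] [(2,'c')] == [((2,'b'),(2,'c'))]
+--   getDifference fst [(1,'a'),(2,'b')] [(2,'c')] == [(1,'a')]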
+
+data TableDiff
+ = TableDiff
+ { _tdNewName :: !(Maybe QualifiedTable)
+ , _tdDroppedCols :: ![PGCol]
+ , _tdAddedCols :: ![PGColInfo]
+ , _tdAlteredCols :: ![(PGColInfo, PGColInfo)]
+ , _tdDroppedCons :: ![ConstraintName]
+ } deriving (Show, Eq)
+
+getTableDiff :: TableMeta -> TableMeta -> TableDiff
+getTableDiff oldtm newtm =
+ TableDiff mNewName droppedCols addedCols alteredCols droppedConstraints
+ where
+ mNewName = bool (Just $ tmTable newtm) Nothing $ tmTable oldtm == tmTable newtm
+ oldCols = tmColumns oldtm
+ newCols = tmColumns newtm
+
+ droppedCols =
+ map pcmColumnName $ getDifference pcmOrdinalPosition oldCols newCols
+
+ addedCols =
+ map pcmToPci $ getDifference pcmOrdinalPosition newCols oldCols
+
+ existingCols = getOverlap pcmOrdinalPosition oldCols newCols
+
+ pcmToPci (PGColMeta colName _ colType)
+ = PGColInfo colName colType
+
+ alteredCols =
+ flip map (filter (uncurry (/=)) existingCols) $ \(pcmo, pcmn) ->
+ (pcmToPci pcmo, pcmToPci pcmn)
+
+ droppedConstraints =
+ map cmConstraintName $ getDifference cmConstraintOid
+ (tmConstraints oldtm) (tmConstraints newtm)
+
+getTableChangeDeps :: (P2C m) => TableInfo -> TableDiff -> m [SchemaObjId]
+getTableChangeDeps ti tableDiff = do
+ sc <- askSchemaCache
+ -- for all the dropped columns
+ droppedColDeps <- fmap concat $ forM droppedCols $ \droppedCol -> do
+ let objId = SOTableObj tn $ TOCol droppedCol
+ return $ getDependentObjs sc objId
+ -- for all dropped constraints
+ droppedConsDeps <- fmap concat $ forM droppedConstraints $ \droppedCons -> do
+ let objId = SOTableObj tn $ TOCons droppedCons
+ return $ getDependentObjs sc objId
+ return $ droppedConsDeps <> droppedColDeps
+ where
+ tn = tiName ti
+ TableDiff _ droppedCols _ _ droppedConstraints = tableDiff
+
+data SchemaDiff
+ = SchemaDiff
+ { _sdDroppedTables :: ![QualifiedTable]
+ , _sdAlteredTables :: ![(QualifiedTable, TableDiff)]
+ } deriving (Show, Eq)
+
+getSchemaDiff :: [TableMeta] -> [TableMeta] -> SchemaDiff
+getSchemaDiff oldMeta newMeta =
+ SchemaDiff droppedTables survivingTables
+ where
+ droppedTables = map tmTable $ getDifference tmOid oldMeta newMeta
+ survivingTables =
+ flip map (getOverlap tmOid oldMeta newMeta) $ \(oldtm, newtm) ->
+ (tmTable oldtm, getTableDiff oldtm newtm)
+
+getSchemaChangeDeps :: (P2C m) => SchemaDiff -> m [SchemaObjId]
+getSchemaChangeDeps schemaDiff = do
+ -- Get schema cache
+ sc <- askSchemaCache
+ let tableIds = map SOTable droppedTables
+ -- Get the dependents of the dropped tables
+ let tableDropDeps = concatMap (getDependentObjs sc) tableIds
+ tableModDeps <- fmap concat $ forM alteredTables $ \(oldQtn, tableDiff) -> do
+ ti <- case M.lookup oldQtn $ scTables sc of
+ Just ti -> return ti
+ Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
+ getTableChangeDeps ti tableDiff
+ return $ filter (not . isDirectDep) $
+ HS.toList $ HS.fromList $ tableDropDeps <> tableModDeps
+ where
+ SchemaDiff droppedTables alteredTables = schemaDiff
+
+ isDirectDep (SOTableObj tn _) = tn `HS.member` (HS.fromList droppedTables)
+ isDirectDep _ = False
diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs
new file mode 100644
index 00000000..82324697
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Schema/Table.hs
@@ -0,0 +1,438 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE QuasiQuotes #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeApplications #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DDL.Schema.Table where
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Deps
+import Hasura.RQL.DDL.Permission
+import Hasura.RQL.DDL.Permission.Internal
+import Hasura.RQL.DDL.QueryTemplate
+import Hasura.RQL.DDL.Relationship
+import Hasura.RQL.DDL.Schema.Diff
+import Hasura.RQL.DDL.Utils
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Database.PostgreSQL.LibPQ as PQ
+
+delTableFromCatalog :: QualifiedTable -> Q.Tx ()
+delTableFromCatalog (QualifiedTable sn tn) =
+ Q.unitQ [Q.sql|
+ DELETE FROM "hdb_catalog"."hdb_table"
+ WHERE table_schema = $1 AND table_name = $2
+ |] (sn, tn) False
+
+saveTableToCatalog :: QualifiedTable -> Q.Tx ()
+saveTableToCatalog (QualifiedTable sn tn) =
+ Q.unitQ [Q.sql|
+ INSERT INTO "hdb_catalog"."hdb_table" VALUES ($1, $2)
+ |] (sn, tn) False
+
+-- Build the TableInfo with all its columns
+getTableInfo :: QualifiedTable -> Q.TxE QErr TableInfo
+getTableInfo qt@(QualifiedTable sn tn) = do
+ tableExists <- Q.catchE defaultTxErrorHandler $ Q.listQ [Q.sql|
+ SELECT true from information_schema.tables
+ WHERE table_schema = $1
+ AND table_name = $2;
+ |] (sn, tn) False
+
+ -- if no row is returned, no such table/view exists
+ unless (tableExists == [Identity True]) $
+ throw400 NotExists $ "no such table/view exists in postgres : " <>> qt
+
+ -- Fetch the column details
+ colData <- Q.catchE defaultTxErrorHandler $ Q.listQ [Q.sql|
+ SELECT column_name, to_json(udt_name)
+ FROM information_schema.columns
+ WHERE table_schema = $1
+ AND table_name = $2
+ |] (sn, tn) False
+ return $ mkTableInfo qt $ map (fmap Q.getAltJ) colData
+
+newtype TrackTable
+ = TrackTable
+ { tName :: QualifiedTable }
+ deriving (Show, Eq, FromJSON, ToJSON, Lift)
+
+trackExistingTableOrViewP1 :: TrackTable -> P1 ()
+trackExistingTableOrViewP1 (TrackTable vn) = do
+ adminOnly
+ rawSchemaCache <- getSchemaCache <$> lift ask
+ when (M.member vn $ scTables rawSchemaCache) $
+ throw400 AlreadyTracked $ "view/table already tracked : " <>> vn
+
+trackExistingTableOrViewP2Setup :: (P2C m) => QualifiedTable -> m ()
+trackExistingTableOrViewP2Setup tn = do
+ ti <- liftTx $ getTableInfo tn
+ addTableToCache ti
+
+trackExistingTableOrViewP2 :: (P2C m) => QualifiedTable -> m RespBody
+trackExistingTableOrViewP2 vn = do
+ trackExistingTableOrViewP2Setup vn
+ liftTx $ Q.catchE defaultTxErrorHandler $
+ saveTableToCatalog vn
+ return successMsg
+
+instance HDBQuery TrackTable where
+
+ type Phase1Res TrackTable = ()
+ phaseOne = trackExistingTableOrViewP1
+
+ phaseTwo (TrackTable tn) _ = trackExistingTableOrViewP2 tn
+
+ schemaCachePolicy = SCPReload
+
+purgeDep :: (CacheRWM m, MonadError QErr m, MonadTx m)
+ => SchemaObjId -> m ()
+purgeDep schemaObjId = case schemaObjId of
+ (SOTableObj tn (TOPerm rn pt)) -> do
+ liftTx $ dropPermFromCatalog tn rn pt
+ withPermType pt delPermFromCache rn tn
+
+ (SOTableObj qt (TORel rn)) -> do
+ liftTx $ delRelFromCatalog qt rn
+ delFldFromCache (fromRel rn) qt
+
+ (SOQTemplate qtn) -> do
+ liftTx $ delQTemplateFromCatalog qtn
+ delQTemplateFromCache qtn
+
+ _ -> throw500 $
+ "unexpected dependent object : " <> reportSchemaObj schemaObjId
+
+processTableChanges :: (P2C m) => TableInfo -> TableDiff -> m ()
+processTableChanges ti tableDiff = do
+
+ when (isJust mNewName) $
+ throw400 NotSupported $ "table renames are not yet supported : " <>> tn
+
+ -- for all the dropped columns
+ forM_ droppedCols $ \droppedCol ->
+ -- Drop the column from the cache
+ delFldFromCache (fromPGCol droppedCol) tn
+
+ -- For the newly added columns, check that none conflicts with an existing relationship
+ forM_ addedCols $ \colInfo@(PGColInfo colName _) ->
+ case M.lookup (fromPGCol colName) $ tiFieldInfoMap ti of
+ Just (FIRelationship _) ->
+ throw400 AlreadyExists $ "cannot add column " <> colName
+ <<> " in table " <> tn <<>
+ " as a relationship with the name already exists"
+ _ -> addFldToCache (fromPGCol colName) (FIColumn colInfo) tn
+
+ sc <- askSchemaCache
+ -- for the remaining (altered) columns
+ forM_ alteredCols $ \(PGColInfo oColName oColTy, nci@(PGColInfo nColName nColTy)) ->
+ if | oColName /= nColName ->
+ throw400 NotSupported $ "column renames are not yet supported : " <>
+ tn <<> "." <>> oColName
+ | oColTy /= nColTy -> do
+ let colId = SOTableObj tn $ TOCol oColName
+ depObjs = getDependentObjsWith (== "on_type") sc colId
+ if null depObjs
+ then updateFldInCache oColName $ FIColumn nci
+ else throw400 DependencyError $ "cannot change type of column " <> oColName <<> " in table "
+ <> tn <<> " because of the following dependencies : " <>
+ reportSchemaObjs depObjs
+ | otherwise -> return ()
+ where
+ updateFldInCache cn ci = do
+ delFldFromCache (fromPGCol cn) tn
+ addFldToCache (fromPGCol cn) ci tn
+ tn = tiName ti
+ TableDiff mNewName droppedCols addedCols alteredCols _ = tableDiff
+
+processSchemaChanges :: (P2C m) => SchemaDiff -> m ()
+processSchemaChanges schemaDiff = do
+ -- Purge the dropped tables
+ forM_ droppedTables $ \qtn@(QualifiedTable sn tn) -> do
+ liftTx $ Q.catchE defaultTxErrorHandler $ do
+ Q.unitQ [Q.sql|
+ DELETE FROM "hdb_catalog"."hdb_relationship"
+ WHERE table_schema = $1 AND table_name = $2
+ |] (sn, tn) False
+ Q.unitQ [Q.sql|
+ DELETE FROM "hdb_catalog"."hdb_permission"
+ WHERE table_schema = $1 AND table_name = $2
+ |] (sn, tn) False
+ delTableFromCatalog qtn
+ delTableFromCache qtn
+ -- Get schema cache
+ sc <- askSchemaCache
+ forM_ alteredTables $ \(oldQtn, tableDiff) -> do
+ ti <- case M.lookup oldQtn $ scTables sc of
+ Just ti -> return ti
+ Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
+ processTableChanges ti tableDiff
+ where
+ SchemaDiff droppedTables alteredTables = schemaDiff
+
+data UntrackTable =
+ UntrackTable
+ { utTable :: !QualifiedTable
+ , utCascade :: !(Maybe Bool)
+ } deriving (Show, Eq, Lift)
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)
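+
+-- Illustrative only (not part of the original change): an UntrackTable
+-- value decodes from
+--   {"table": {"schema": "public", "name": "author"}, "cascade": true}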
+
+unTrackExistingTableOrViewP1 :: UntrackTable -> P1 (UntrackTable, TableInfo)
+unTrackExistingTableOrViewP1 ut@(UntrackTable vn _) = do
+ adminOnly
+ rawSchemaCache <- getSchemaCache <$> lift ask
+ case M.lookup vn (scTables rawSchemaCache) of
+ Just ti -> return (ut, ti)
+ Nothing -> throw400 AlreadyUntracked $
+ "view/table already untracked : " <>> vn
+
+unTrackExistingTableOrViewP2 :: (P2C m)
+ => UntrackTable -> TableInfo -> m RespBody
+unTrackExistingTableOrViewP2 (UntrackTable vn cascade) tableInfo = do
+ sc <- askSchemaCache
+
+ -- Get Foreign key constraints to this table
+ fKeyTables <- liftTx getFKeyTables
+ let fKeyDepIds = mkFKeyObjIds $ filterTables fKeyTables $ scTables sc
+
+ -- Report back with an error if any fkey object ids are present
+ when (fKeyDepIds /= []) $ reportDepsExt fKeyDepIds []
+
+ -- Get relational and query template dependants
+ let allRels = getAllRelations $ scTables sc
+ directRelDep = (vn, getRels $ tiFieldInfoMap tableInfo)
+ relDeps = directRelDep : foldl go [] allRels
+ relDepIds = concatMap mkObjIdFromRel relDeps
+ queryTDepIds = getDependentObjsOfQTemplateCache (SOTable vn)
+ (scQTemplates sc)
+ allDepIds = relDepIds <> queryTDepIds
+
+ -- Report back with an error if cascade is not set
+ when (allDepIds /= [] && not (or cascade)) $ reportDepsExt allDepIds []
+
+ -- Purge all the dependants from state
+ mapM_ purgeDep allDepIds
+
+ -- update the schema cache with the changes
+ processSchemaChanges $ SchemaDiff [vn] []
+
+ return successMsg
+ where
+ QualifiedTable sn tn = vn
+ getFKeyTables = Q.catchE defaultTxErrorHandler $ Q.listQ [Q.sql|
+ SELECT constraint_name,
+ table_schema,
+ table_name
+ FROM hdb_catalog.hdb_foreign_key_constraint
+ WHERE ref_table_table_schema = $1
+ AND ref_table = $2
+ |] (sn, tn) False
+ filterTables tables tc = flip filter tables $ \(_, s, t) ->
+ isJust $ M.lookup (QualifiedTable s t) tc
+
+ mkFKeyObjIds tables = flip map tables $ \(cn, s, t) ->
+ SOTableObj (QualifiedTable s t) (TOCons cn)
+
+ getAllRelations tc = map getRelInfo $ M.toList tc
+ getRelInfo (qt, ti) = (qt, getRels $ tiFieldInfoMap ti)
+
+ go l (qt, ris) = if any isDep ris
+ then (qt, filter isDep ris):l
+ else l
+ isDep relInfo = vn == riRTable relInfo
+ mkObjIdFromRel (qt, ris) = flip map ris $ \ri ->
+ SOTableObj qt (TORel $ riName ri)
+
+instance HDBQuery UntrackTable where
+ type Phase1Res UntrackTable = (UntrackTable, TableInfo)
+ phaseOne = unTrackExistingTableOrViewP1
+
+ phaseTwo _ = uncurry unTrackExistingTableOrViewP2
+
+ schemaCachePolicy = SCPReload
+
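+-- Rebuild the schema cache from scratch out of the hdb_catalog tables:
+-- tracked tables first, then relationships, permissions and query
+-- templates, validating each item against the cache built so far.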
+buildSchemaCache :: Q.TxE QErr SchemaCache
+buildSchemaCache = flip execStateT emptySchemaCache $ do
+ tables <- lift $ Q.catchE defaultTxErrorHandler fetchTables
+ forM_ tables $ \(sn, tn) ->
+ modifyErr (\e -> "table " <> tn <<> "; " <> e) $
+ trackExistingTableOrViewP2Setup $ QualifiedTable sn tn
+
+ -- Fetch all the relationships
+ relationships <- lift $ Q.catchE defaultTxErrorHandler fetchRelationships
+
+ forM_ relationships $ \(sn, tn, rn, rt, Q.AltJ rDef) ->
+ modifyErr (\e -> "table " <> tn <<> "; rel " <> rn <<> "; " <> e) $ case rt of
+ ObjRel -> do
+ using <- decodeValue rDef
+ objRelP2Setup (QualifiedTable sn tn) $ RelDef rn using Nothing
+ ArrRel -> do
+ using <- decodeValue rDef
+ arrRelP2Setup (QualifiedTable sn tn) $ RelDef rn using Nothing
+
+ -- Fetch all the permissions
+ permissions <- lift $ Q.catchE defaultTxErrorHandler fetchPermissions
+
+ forM_ permissions $ \(sn, tn, rn, pt, Q.AltJ pDef) ->
+ modifyErr (\e -> "table " <> tn <<> "; role " <> rn <<> "; " <> e) $ case pt of
+ PTInsert -> permHelper sn tn rn pDef PAInsert
+ PTSelect -> permHelper sn tn rn pDef PASelect
+ PTUpdate -> permHelper sn tn rn pDef PAUpdate
+ PTDelete -> permHelper sn tn rn pDef PADelete
+
+ -- Fetch all the query templates
+ qtemplates <- lift $ Q.catchE defaultTxErrorHandler fetchQTemplates
+ forM_ qtemplates $ \(qtn, Q.AltJ qtDefVal) -> do
+ qtDef <- decodeValue qtDefVal
+ qCtx <- mkAdminQCtx <$> get
+ qti <- liftP1 qCtx $ createQueryTemplateP1 $
+ CreateQueryTemplate qtn qtDef Nothing
+ addQTemplateToCache qti
+ where
+ permHelper sn tn rn pDef pa = do
+ qCtx <- mkAdminQCtx <$> get
+ perm <- decodeValue pDef
+ let qt = QualifiedTable sn tn
+ permDef = PermDef rn perm Nothing
+ createPerm = WithTable qt permDef
+ p1Res <- liftP1 qCtx $ phaseOne createPerm
+ addPermP2Setup qt permDef p1Res
+ addPermToCache qt rn pa p1Res
+ -- p2F qt rn p1Res
+
+ fetchTables =
+ Q.listQ [Q.sql|
+ SELECT table_schema, table_name FROM hdb_catalog.hdb_table
+ |] () False
+
+ fetchRelationships =
+ Q.listQ [Q.sql|
+ SELECT table_schema, table_name, rel_name, rel_type, rel_def::json
+ FROM hdb_catalog.hdb_relationship
+ |] () False
+
+ fetchPermissions =
+ Q.listQ [Q.sql|
+ SELECT table_schema, table_name, role_name, perm_type, perm_def::json
+ FROM hdb_catalog.hdb_permission
+ |] () False
+
+ fetchQTemplates =
+ Q.listQ [Q.sql|
+ SELECT template_name, template_defn :: json FROM hdb_catalog.hdb_query_template
+ |] () False
+
+data RunSQL
+ = RunSQL
+ { rSql :: T.Text
+ , rCascade :: !(Maybe Bool)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 1 snakeCase){omitNothingFields=True} ''RunSQL)
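+
+-- Illustrative only (not part of the original change): a RunSQL value
+-- decodes from
+--   {"sql": "ALTER TABLE author ADD COLUMN age integer", "cascade": false}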
+
+data RunSQLRes
+ = RunSQLRes
+ { rrResultType :: !T.Text
+ , rrResult :: !Value
+ } deriving (Show, Eq)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RunSQLRes)
+
+runSqlP2 :: (P2C m) => RunSQL -> m RespBody
+runSqlP2 (RunSQL t cascade) = do
+
+ -- Drop hdb_views so no interference is caused to the sql query
+ liftTx $ Q.catchE defaultTxErrorHandler $
+ Q.unitQ clearHdbViews () False
+
+ -- Fetch metadata of all tables before running the SQL; filtered below to only the tracked tables
+ oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
+
+ -- Run the SQL
+ res <- liftTx $ Q.multiQE rawSqlErrHandler $ Q.fromBuilder $ TE.encodeUtf8Builder t
+
+ -- Get the metadata after the sql query
+ newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
+ sc <- askSchemaCache
+ let existingTables = M.keys $ scTables sc
+ oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables
+ schemaDiff = getSchemaDiff oldMeta newMeta
+
+ indirectDeps <- getSchemaChangeDeps schemaDiff
+
+ -- Report back with an error if cascade is not set
+ when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps []
+
+ -- Purge all the indirect dependents from state
+ mapM_ purgeDep indirectDeps
+
+ -- update the schema cache with the changes
+ processSchemaChanges schemaDiff
+
+ postSc <- askSchemaCache
+ -- recreate the insert permission infra
+ forM_ (M.elems $ scTables postSc) $ \ti -> do
+ let tn = tiName ti
+ forM_ (M.elems $ tiRolePermInfoMap ti) $ \rpi ->
+ maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi
+
+ return $ encode (res :: RunSQLRes)
+
+ where
+ rawSqlErrHandler :: Q.PGTxErr -> QErr
+ rawSqlErrHandler txe =
+ let e = err400 PostgresError "query execution failed"
+ in e {qeInternal = Just $ toJSON txe}
+
+instance HDBQuery RunSQL where
+
+ type Phase1Res RunSQL = ()
+ phaseOne _ = adminOnly
+
+ phaseTwo q _ = runSqlP2 q
+
+ schemaCachePolicy = SCPReload
+
+-- Should be used only after checking the result status
+resToCSV :: PQ.Result -> ExceptT T.Text IO [[T.Text]]
+resToCSV r = do
+ nr <- liftIO $ PQ.ntuples r
+ nc <- liftIO $ PQ.nfields r
+
+ hdr <- forM [0..pred nc] $ \ic -> do
+ colNameBS <- liftIO $ PQ.fname r ic
+ maybe (return "unknown") decodeBS colNameBS
+
+ rows <- forM [0..pred nr] $ \ir ->
+ forM [0..pred nc] $ \ic -> do
+ cellValBS <- liftIO $ PQ.getvalue r ir ic
+ maybe (return "NULL") decodeBS cellValBS
+
+ return $ hdr:rows
+
+ where
+ decodeBS = either (throwError . T.pack . show) return . TE.decodeUtf8'
+
+instance Q.FromRes RunSQLRes where
+ fromRes (Q.ResultOkEmpty _) =
+ return $ RunSQLRes "CommandOk" Null
+ fromRes (Q.ResultOkData res) = do
+ csvRows <- resToCSV res
+ return $ RunSQLRes "TuplesOk" $ toJSON csvRows
diff --git a/server/src-lib/Hasura/RQL/DDL/Utils.hs b/server/src-lib/Hasura/RQL/DDL/Utils.hs
new file mode 100644
index 00000000..38f192d5
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DDL/Utils.hs
@@ -0,0 +1,15 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.RQL.DDL.Utils where
+
+import qualified Database.PG.Query as Q
+
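+-- Drops every view in the hdb_views schema. This is run before executing
+-- arbitrary SQL so the generated views do not interfere with the query;
+-- they are rebuilt afterwards (see runSqlP2).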
+clearHdbViews :: Q.Query
+clearHdbViews =
+ "DO $$ DECLARE \
+ \ r RECORD; \
+ \ BEGIN \
+ \ FOR r IN (SELECT viewname FROM pg_views WHERE schemaname = 'hdb_views') LOOP \
+ \ EXECUTE 'DROP VIEW IF EXISTS hdb_views.' || quote_ident(r.viewname) || ' CASCADE'; \
+ \ END LOOP; \
+ \ END $$ "
diff --git a/server/src-lib/Hasura/RQL/DML/Count.hs b/server/src-lib/Hasura/RQL/DML/Count.hs
new file mode 100644
index 00000000..a95a543e
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Count.hs
@@ -0,0 +1,122 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Count where
+
+import Data.Aeson
+import Instances.TH.Lift ()
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.Sequence as DS
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+data CountQueryP1
+ = CountQueryP1
+ { cqp1Table :: !QualifiedTable
+ , cqp1Where :: !(S.BoolExp, Maybe (GBoolExp AnnSQLBoolExp))
+ , cqp1Distinct :: !(Maybe [PGCol])
+ } deriving (Show, Eq)
+
+getCountDeps
+ :: CountQueryP1 -> [SchemaDependency]
+getCountDeps (CountQueryP1 tn (_, mWc) mDistCols) =
+ mkParentDep tn
+ : fromMaybe [] whereDeps
+ <> fromMaybe [] distDeps
+ where
+ distDeps = map (mkColDep "untyped" tn) <$> mDistCols
+ whereDeps = getBoolExpDeps tn <$> mWc
+
+mkSQLCount
+ :: CountQueryP1 -> S.Select
+mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) =
+ S.mkSelect
+ { S.selExtr = [S.Extractor (S.SEFnApp "count" [S.SEStar] Nothing) Nothing]
+ , S.selFrom = Just $ S.FromExp
+ [S.mkSelFromExp False innerSel $ TableName "r"]
+ }
+ where
+
+ finalWC =
+ S.BEBin S.AndOp permFltr $
+ maybe (S.BELit True) cBoolExp mWc
+
+ innerSel = partSel
+ { S.selFrom = Just $ S.mkSimpleFromExp tn
+ , S.selWhere = Just $ S.WhereFrag finalWC
+ }
+
+ partSel = case mDistCols of
+ Just distCols ->
+ let extrs = flip map distCols $ \c -> S.Extractor (S.mkSIdenExp c) Nothing
+ in S.mkSelect
+ { S.selDistinct = Just S.DistinctSimple
+ , S.selExtr = extrs
+ }
+ Nothing -> S.mkSelect
+ { S.selExtr = [S.Extractor S.SEStar Nothing] }
+
+-- SELECT count(*) FROM (SELECT DISTINCT c1, .. cn FROM .. WHERE ..) r;
+-- SELECT count(*) FROM (SELECT * FROM .. WHERE ..) r;
+countP1
+ :: (P1C m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> CountQuery
+ -> m CountQueryP1
+countP1 prepValBuilder (CountQuery qt mDistCols mWhere) = do
+ tableInfo <- askTabInfo qt
+
+ -- Check if select is allowed
+ selPerm <- modifyErr (<> selNecessaryMsg) $
+ askSelPermInfo tableInfo
+
+ let colInfoMap = tiFieldInfoMap tableInfo
+
+ forM_ mDistCols $ \distCols -> do
+ let distColAsrns = [ checkSelOnCol selPerm
+ , assertPGCol colInfoMap relInDistColsErr]
+ withPathK "distinct" $ verifyAsrns distColAsrns distCols
+
+ -- convert the where clause
+ annSQLBoolExp <- forM mWhere $ \be ->
+ withPathK "where" $
+ convBoolExp' colInfoMap qt selPerm be prepValBuilder
+
+ return $ CountQueryP1
+ qt
+ (spiFilter selPerm, annSQLBoolExp)
+ mDistCols
+ where
+ selNecessaryMsg =
+ "; \"count\" is only allowed if the role "
+ <> "has \"select\" permissions on the table"
+ relInDistColsErr =
+ "Relationships can't be used in \"distinct\"."
+
+countP2 :: (P2C m) => (CountQueryP1, DS.Seq Q.PrepArg) -> m RespBody
+countP2 (u, p) = do
+ qRes <- liftTx $ Q.rawQE dmlTxErrorHandler (Q.fromBuilder countSQL) (toList p) True
+ return $ BB.toLazyByteString $ encodeCount qRes
+ where
+ countSQL = toSQL $ mkSQLCount u
+ encodeCount (Q.SingleRow (Identity c)) =
+ BB.byteString "{\"count\":" <> BB.intDec c <> BB.char7 '}'
+
+instance HDBQuery CountQuery where
+
+ type Phase1Res CountQuery = (CountQueryP1, DS.Seq Q.PrepArg)
+ phaseOne = flip runStateT DS.empty . countP1 binRHSBuilder
+
+ phaseTwo _ = countP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DML/Delete.hs b/server/src-lib/Hasura/RQL/DML/Delete.hs
new file mode 100644
index 00000000..51f24b10
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Delete.hs
@@ -0,0 +1,103 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Delete where
+
+import Data.Aeson
+import Instances.TH.Lift ()
+
+import qualified Data.Sequence as DS
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.DML.Returning
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+data DeleteQueryP1
+ = DeleteQueryP1
+ { dqp1Table :: !QualifiedTable
+ , dqp1Where :: !(S.BoolExp, GBoolExp AnnSQLBoolExp)
+ , dqp1MutFlds :: !MutFlds
+ } deriving (Show, Eq)
+
+mkSQLDelete
+ :: DeleteQueryP1 -> S.SelectWith
+mkSQLDelete (DeleteQueryP1 tn (fltr, wc) mutFlds) =
+ mkSelWith (S.CTEDelete delete) mutFlds
+ where
+ delete = S.SQLDelete tn Nothing tableFltr $ Just S.returningStar
+ tableFltr = Just $ S.WhereFrag $ S.BEBin S.AndOp fltr $ cBoolExp wc
+
+getDeleteDeps
+ :: DeleteQueryP1 -> [SchemaDependency]
+getDeleteDeps (DeleteQueryP1 tn (_, wc) mutFlds) =
+ mkParentDep tn : whereDeps <> retDeps
+ where
+ whereDeps = getBoolExpDeps tn wc
+ retDeps = map (mkColDep "untyped" tn . fst) $
+ pgColsFromMutFlds mutFlds
+
+convDeleteQuery
+ :: (P1C m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> DeleteQuery
+ -> m DeleteQueryP1
+convDeleteQuery prepValBuilder (DeleteQuery tableName rqlBE mRetCols) = do
+ tableInfo <- askTabInfo tableName
+
+ -- Check if the role has delete permissions
+ delPerm <- askDelPermInfo tableInfo
+
+ -- Check if all dependent headers are present
+ validateHeaders $ dpiRequiredHeaders delPerm
+
+ -- Check if select is allowed
+ selPerm <- modifyErr (<> selNecessaryMsg) $
+ askSelPermInfo tableInfo
+
+ let fieldInfoMap = tiFieldInfoMap tableInfo
+
+ -- convert the returning cols into sql returning exp
+ mAnnRetCols <- forM mRetCols $ \retCols ->
+ withPathK "returning" $
+ zip retCols <$> checkRetCols fieldInfoMap selPerm retCols
+
+ -- convert the where clause
+ annSQLBoolExp <- withPathK "where" $
+ convBoolExp' fieldInfoMap tableName selPerm rqlBE prepValBuilder
+
+ return $ DeleteQueryP1 tableName
+ (dpiFilter delPerm, annSQLBoolExp)
+ (mkDefaultMutFlds mAnnRetCols)
+
+ where
+ selNecessaryMsg =
+ "; \"delete\" is only allowed if the role "
+ <> "has \"select\" permission as \"where\" can't be used "
+ <> "without \"select\" permission on the table"
+
+convDelQ :: DeleteQuery -> P1 (DeleteQueryP1, DS.Seq Q.PrepArg)
+convDelQ delQ = flip runStateT DS.empty $ convDeleteQuery binRHSBuilder delQ
+
+deleteP2 :: (DeleteQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
+deleteP2 (u, p) =
+ runIdentity . Q.getRow
+ <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder deleteSQL) (toList p) True
+ where
+ deleteSQL = toSQL $ mkSQLDelete u
+
+instance HDBQuery DeleteQuery where
+
+ type Phase1Res DeleteQuery = (DeleteQueryP1, DS.Seq Q.PrepArg)
+ phaseOne = convDelQ
+
+ phaseTwo _ = liftTx . deleteP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DML/Explain.hs b/server/src-lib/Hasura/RQL/DML/Explain.hs
new file mode 100644
index 00000000..afd6e616
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Explain.hs
@@ -0,0 +1,57 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.RQL.DML.Explain where
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+
+import qualified Data.ByteString.Builder as BB
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.DML.Select
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Data.String.Conversions as CS
+import qualified Data.Text as T
+import qualified Database.PG.Query as Q
+
+data RQLExplain =
+ RQLExplain
+ { rqleQuery :: !SelectQuery
+ , rqleRole :: !RoleName
+ , rqleHeaders :: !HeaderObj
+ } deriving (Show, Eq)
+$(deriveJSON (aesonDrop 4 camelCase) ''RQLExplain)
+
+data ExplainResp =
+ ExplainResp
+ { erSql :: !T.Text
+ , erPlans :: !Value
+ } deriving (Show, Eq)
+$(deriveJSON (aesonDrop 2 camelCase) ''ExplainResp)
+
+phaseOneExplain :: SelectQuery -> P1 SelectData
+phaseOneExplain = convSelectQuery txtRHSBuilder
+
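+-- Prefix the generated SELECT with EXPLAIN (FORMAT JSON) and return both
+-- the SQL text and the JSON plan that postgres produces for it.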
+phaseTwoExplain :: (P2C m) => SelectData -> m RespBody
+phaseTwoExplain sel = do
+ planResp <- liftTx $ runIdentity . Q.getRow <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder withExplain) [] True
+ plans <- decodeBS planResp
+ return $ encode $ ExplainResp selectSQLT plans
+ where
+ selectSQL = toSQL $ mkSQLSelect sel
+ explainSQL = BB.string7 "EXPLAIN (FORMAT JSON) "
+ withExplain = explainSQL <> selectSQL
+
+ decodeBS bs = case eitherDecode bs of
+ Left e -> throw500 $
+ "Plan query response is invalid json; " <> T.pack e
+ Right a -> return a
+
+ selectSQLT = CS.cs $ BB.toLazyByteString selectSQL
diff --git a/server/src-lib/Hasura/RQL/DML/Insert.hs b/server/src-lib/Hasura/RQL/DML/Insert.hs
new file mode 100644
index 00000000..0705a1ca
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Insert.hs
@@ -0,0 +1,197 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Insert where
+
+import Data.Aeson.Types
+import Instances.TH.Lift ()
+
+import qualified Data.HashMap.Strict as HM
+import qualified Data.Sequence as DS
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.DML.Returning
+import Hasura.RQL.Instances ()
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+data ConflictTarget
+ = Column ![PGCol]
+ | Constraint !ConstraintName
+ deriving (Show, Eq)
+
+data ConflictClauseP1
+ = CP1DoNothing !(Maybe ConflictTarget)
+ | CP1Update !ConflictTarget ![PGCol]
+ deriving (Show, Eq)
+
+data InsertQueryP1
+ = InsertQueryP1
+ { iqp1Table :: !QualifiedTable
+ , iqp1View :: !QualifiedTable
+ , iqp1Cols :: ![PGCol]
+ , iqp1Tuples :: ![[S.SQLExp]]
+ , iqp1Conflict :: !(Maybe ConflictClauseP1)
+ , iqp1MutFlds :: !MutFlds
+ } deriving (Show, Eq)
+
+mkSQLInsert :: InsertQueryP1 -> S.SelectWith
+mkSQLInsert (InsertQueryP1 _ vn cols vals c mutFlds) =
+ mkSelWith (S.CTEInsert insert) mutFlds
+ where
+ insert =
+ S.SQLInsert vn cols vals (toSQLConflict c) $ Just S.returningStar
+ toSQLConflict conflict = case conflict of
+ Nothing -> Nothing
+ Just (CP1DoNothing Nothing) -> Just $ S.DoNothing Nothing
+ Just (CP1DoNothing (Just ct)) -> Just $ S.DoNothing $ Just $ toSQLCT ct
+ Just (CP1Update ct pgCols) -> Just $ S.Update (toSQLCT ct)
+ (S.SetExp $ toSQLSetExps pgCols)
+
+ toSQLCT ct = case ct of
+ Column pgCols -> S.SQLColumn pgCols
+ Constraint cn -> S.SQLConstraint cn
+
+ toSQLSetExps = map $ \col
+ -> S.SetExpItem (col, S.SEExcluded $ getPGColTxt col)
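+
+ -- Illustrative only (not part of the original change): for
+ -- CP1Update (Column [PGCol "id"]) [PGCol "name"] the rendered clause
+ -- is roughly ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name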
+
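+-- Map every column of the table to the literal DEFAULT keyword, so that
+-- columns missing from an insert object fall back to their postgres
+-- defaults; convObj below overrides these with the user-supplied values.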
+mkDefValMap :: FieldInfoMap -> HM.HashMap PGCol S.SQLExp
+mkDefValMap cim =
+ HM.fromList $ flip zip (repeat $ S.SEUnsafe "DEFAULT") $
+ map (PGCol . getFieldNameTxt) $ HM.keys $ HM.filter isPGColInfo cim
+
+getInsertDeps
+ :: InsertQueryP1 -> [SchemaDependency]
+getInsertDeps (InsertQueryP1 tn _ _ _ _ mutFlds) =
+ mkParentDep tn : retDeps
+ where
+ retDeps = map (mkColDep "untyped" tn . fst) $
+ pgColsFromMutFlds mutFlds
+
+convObj
+ :: (P1C m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> HM.HashMap PGCol S.SQLExp
+ -> FieldInfoMap
+ -> InsObj
+ -> m [S.SQLExp]
+convObj prepFn defInsVals fieldInfoMap insObj = do
+ inpInsVals <- flip HM.traverseWithKey insObj $ \c val -> do
+ let relWhenPGErr = "relationships can't be inserted"
+ colType <- askPGType fieldInfoMap c relWhenPGErr
+ -- Encode aeson's value into prepared value
+ withPathK (getPGColTxt c) $ prepFn colType val
+
+ return $ HM.elems $ HM.union inpInsVals defInsVals
+
+buildConflictClause
+ :: (P1C m)
+ => TableInfo
+ -> OnConflict
+ -> m ConflictClauseP1
+buildConflictClause tableInfo (OnConflict mTCol mTCons act) = case (mTCol, mTCons, act) of
+ (Nothing, Nothing, CAIgnore) -> return $ CP1DoNothing Nothing
+ (Just col, Nothing, CAIgnore) -> do
+ validateCols col
+ return $ CP1DoNothing $ Just $ Column $ getPGCols col
+ (Nothing, Just cons, CAIgnore) -> return $ CP1DoNothing $ Just $ Constraint cons
+ (Nothing, Nothing, CAUpdate) -> throw400 UnexpectedPayload
+ "Expecting 'constraint' or 'constraint_on' when the 'action' is 'update'"
+ (Just col, Nothing, CAUpdate) -> do
+ validateCols col
+ return $ CP1Update (Column $ getPGCols col) columns
+ (Nothing, Just cons, CAUpdate) -> return $ CP1Update (Constraint cons) columns
+ (Just _, Just _, _) -> throw400 UnexpectedPayload
+ "'constraint' and 'constraint_on' cannot be set at a time"
+ where
+ fieldInfoMap = tiFieldInfoMap tableInfo
+ columns = map pgiName $ getCols fieldInfoMap
+ validateCols c = do
+ let targetcols = getPGCols c
+ void $ withPathK "constraint_on" $ indexedForM targetcols $
+ \pgCol -> askPGType fieldInfoMap pgCol ""
+
+convInsertQuery
+ :: (P1C m)
+ => (Value -> m [InsObj])
+ -> (PGColType -> Value -> m S.SQLExp)
+ -> InsertQuery
+ -> m InsertQueryP1
+convInsertQuery objsParser prepFn (InsertQuery tableName val oC mRetCols) = do
+
+ insObjs <- objsParser val
+
+ -- Get the current table information
+ tableInfo <- askTabInfo tableName
+
+ -- Check if the role has insert permissions
+ insPerm <- askInsPermInfo tableInfo
+
+ -- Check if all dependent headers are present
+ validateHeaders $ ipiRequiredHeaders insPerm
+
+ let fieldInfoMap = tiFieldInfoMap tableInfo
+
+ -- convert the returning cols into sql returning exp
+ mAnnRetCols <- forM mRetCols $ \retCols -> do
+ -- Check if select is allowed only if you specify returning
+ selPerm <- modifyErr (<> selNecessaryMsg) $
+ askSelPermInfo tableInfo
+
+ withPathK "returning" $
+ zip retCols <$> checkRetCols fieldInfoMap selPerm retCols
+
+ let mutFlds = mkDefaultMutFlds mAnnRetCols
+
+ let defInsVals = mkDefValMap fieldInfoMap
+ insCols = HM.keys defInsVals
+ insView = ipiView insPerm
+
+ insTuples <- withPathK "objects" $ indexedForM insObjs $ \obj ->
+ convObj prepFn defInsVals fieldInfoMap obj
+
+ conflictClause <- withPathK "on_conflict" $ forM oC $ \c -> do
+ roleName <- askCurRole
+ unless (ipiAllowUpsert insPerm) $ throw400 PermissionDenied $
+ "upsert is not allowed for role" <>> roleName
+ buildConflictClause tableInfo c
+ return $ InsertQueryP1 tableName insView insCols insTuples
+ conflictClause mutFlds
+
+ where
+ selNecessaryMsg =
+ "; \"returning\" can only be used if the role has "
+ <> "\"select\" permission on the table"
+
+decodeInsObjs :: (P1C m) => Value -> m [InsObj]
+decodeInsObjs v = do
+ objs <- decodeValue v
+ when (null objs) $ throw400 UnexpectedPayload "objects should not be empty"
+ return objs
+
+convInsQ :: InsertQuery -> P1 (InsertQueryP1, DS.Seq Q.PrepArg)
+convInsQ insQ =
+ flip runStateT DS.empty $ convInsertQuery
+ (withPathK "objects" . decodeInsObjs) binRHSBuilder insQ
+
+insertP2 :: (InsertQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
+insertP2 (u, p) =
+ runIdentity . Q.getRow
+ <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder insertSQL) (toList p) True
+ where
+ insertSQL = toSQL $ mkSQLInsert u
+
+instance HDBQuery InsertQuery where
+
+ type Phase1Res InsertQuery = (InsertQueryP1, DS.Seq Q.PrepArg)
+ phaseOne = convInsQ
+
+ phaseTwo _ = liftTx . insertP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DML/Internal.hs b/server/src-lib/Hasura/RQL/DML/Internal.hs
new file mode 100644
index 00000000..6d10bb72
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Internal.hs
@@ -0,0 +1,287 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Internal where
+
+import qualified Database.PG.Query as Q
+import qualified Database.PG.Query.Connection as Q
+import qualified Hasura.SQL.DML as S
+
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.Prelude
+
+import Control.Lens
+import Data.Aeson.Types
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.HashSet as HS
+import qualified Data.Sequence as DS
+import qualified Data.Text as T
+
+-- class (P1C m) => Preparable m where
+-- prepValBuilder :: PGColType -> Value -> m S.SQLExp
+
+type DMLP1 = StateT (DS.Seq Q.PrepArg) P1
+
+instance CacheRM DMLP1 where
+ askSchemaCache = lift askSchemaCache
+
+instance UserInfoM DMLP1 where
+ askUserInfo = lift askUserInfo
+
+-- instance P1C DMLP1 where
+-- askUserInfo = lift askUserInfo
+
+-- instance Preparable DMLP1 where
+-- prepValBuilder = binRHSBuilder
+
+peelDMLP1 :: QCtx -> DMLP1 a -> Either QErr (a, [Q.PrepArg])
+peelDMLP1 qEnv m = do
+ (a, prepSeq) <- runP1 qEnv $ runStateT m DS.empty
+ return (a, toList prepSeq)
+
+mkAdminRolePermInfo :: TableInfo -> RolePermInfo
+mkAdminRolePermInfo ti =
+ RolePermInfo (Just i) (Just s) (Just u) (Just d)
+ where
+ pgCols = map pgiName
+ . fst . partitionEithers
+ . map fieldInfoToEither . M.elems $ tiFieldInfoMap ti
+
+ tn = tiName ti
+ i = InsPermInfo tn (S.BELit True) True [] []
+ s = SelPermInfo (HS.fromList pgCols) tn (S.BELit True) [] []
+ u = UpdPermInfo (HS.fromList pgCols) tn (S.BELit True) [] []
+ d = DelPermInfo tn (S.BELit True) [] []
+
+askPermInfo'
+ :: (P1C m)
+ => PermAccessor c
+ -> TableInfo
+ -> m (Maybe c)
+askPermInfo' pa tableInfo = do
+ roleName <- askCurRole
+ let mrpi = getRolePermInfo roleName
+ return $ mrpi >>= (^. permAccToLens pa)
+ where
+ rpim = tiRolePermInfoMap tableInfo
+ getRolePermInfo roleName
+ | roleName == adminRole = Just $ mkAdminRolePermInfo tableInfo
+ | otherwise = M.lookup roleName rpim
+
+askPermInfo
+ :: (P1C m)
+ => PermAccessor c
+ -> TableInfo
+ -> m c
+askPermInfo pa tableInfo = do
+ roleName <- askCurRole
+ mPermInfo <- askPermInfo' pa tableInfo
+ case mPermInfo of
+ Just c -> return c
+ Nothing -> throw400 PermissionDenied $ mconcat
+ [ pt <> " on " <>> tiName tableInfo
+ , " for role " <>> roleName
+ , " is not allowed. "
+ ]
+ where
+ pt = permTypeToCode $ permAccToType pa
+
+askInsPermInfo
+ :: (P1C m)
+ => TableInfo -> m InsPermInfo
+askInsPermInfo = askPermInfo PAInsert
+
+askSelPermInfo
+ :: (P1C m)
+ => TableInfo -> m SelPermInfo
+askSelPermInfo = askPermInfo PASelect
+
+askUpdPermInfo
+ :: (P1C m)
+ => TableInfo -> m UpdPermInfo
+askUpdPermInfo = askPermInfo PAUpdate
+
+askDelPermInfo
+ :: (P1C m)
+ => TableInfo -> m DelPermInfo
+askDelPermInfo = askPermInfo PADelete
+
+verifyAsrns :: (MonadError QErr m) => [a -> m ()] -> [a] -> m ()
+verifyAsrns preds xs = indexedForM_ xs $ \a -> mapM_ ($ a) preds
+
+checkSelOnCol :: (UserInfoM m, QErrM m)
+ => SelPermInfo -> PGCol -> m ()
+checkSelOnCol selPermInfo =
+ checkPermOnCol PTSelect (spiCols selPermInfo)
+
+checkPermOnCol
+ :: (UserInfoM m, QErrM m)
+ => PermType
+ -> HS.HashSet PGCol
+ -> PGCol
+ -> m ()
+checkPermOnCol pt allowedCols pgCol = do
+ roleName <- askCurRole
+ unless (HS.member pgCol allowedCols) $
+ throw400 PermissionDenied $ permErrMsg roleName
+ where
+ permErrMsg (RoleName "admin") =
+ "no such column exists : " <>> pgCol
+ permErrMsg roleName =
+ mconcat
+ [ "role " <>> roleName
+ , " does not have permission to "
+ , permTypeToCode pt <> " column " <>> pgCol
+ ]
+
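+-- Push the validated value onto the prepared-argument sequence and return
+-- a positional placeholder for it: the first value becomes $1, the next
+-- $2, and so on, matching the order the arguments are later passed in.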
+binRHSBuilder :: PGColType -> Value -> DMLP1 S.SQLExp
+binRHSBuilder colType val = do
+ preparedArgs <- get
+ binVal <- runAesonParser (convToBin colType) val
+ put (preparedArgs DS.|> binVal)
+ return $ toPrepParam (DS.length preparedArgs + 1) colType
+
+fetchRelTabInfo
+ :: (P1C m)
+ => QualifiedTable
+ -> m TableInfo
+fetchRelTabInfo refTabName =
+ -- Internal error
+ modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName
+
+fetchRelDet
+ :: (P1C m)
+ => RelName -> QualifiedTable
+ -> m (FieldInfoMap, SelPermInfo)
+fetchRelDet relName refTabName = do
+ roleName <- askCurRole
+ -- Internal error
+ refTabInfo <- fetchRelTabInfo refTabName
+ -- Get the correct constraint that applies to the given relationship
+ refSelPerm <- modifyErr (relPermErr refTabName roleName) $
+ askSelPermInfo refTabInfo
+
+ return (tiFieldInfoMap refTabInfo, refSelPerm)
+ where
+ relPermErr rTable roleName _ =
+ mconcat
+ [ "role " <>> roleName
+ , " does not have permission to read relationship " <>> relName
+ , "; no permission on"
+ , " table " <>> rTable
+ ]
+
+checkOnColExp :: (P1C m)
+ => SelPermInfo -> AnnValS -> m AnnValS
+checkOnColExp spi annVal =
+ case annVal of
+ AVCol pci@(PGColInfo cn _) opExps -> do
+ checkSelOnCol spi cn
+ return $ AVCol pci opExps
+ AVRel relInfo nesAnn _ -> do
+ relSPI <- snd <$> fetchRelDet (riName relInfo) (riRTable relInfo)
+ modAnn <- checkSelPerm relSPI nesAnn
+ return $ AVRel relInfo modAnn $ spiFilter relSPI
+
+checkSelPerm :: (P1C m)
+ => SelPermInfo -> GBoolExp AnnValS -> m (GBoolExp AnnValS)
+checkSelPerm spi = mapBoolExp (checkOnColExp spi)
+
+convBoolExp
+ :: (P1C m)
+ => FieldInfoMap
+ -> QualifiedTable
+ -> SelPermInfo
+ -> BoolExp
+ -> (PGColType -> Value -> m S.SQLExp)
+ -> m S.BoolExp
+convBoolExp cim tn spi be prepValBuilder =
+ cBoolExp <$> convBoolExp' cim tn spi be prepValBuilder
+
+convBoolExp'
+ :: (P1C m)
+ => FieldInfoMap
+ -> QualifiedTable
+ -> SelPermInfo
+ -> BoolExp
+ -> (PGColType -> Value -> m S.SQLExp)
+ -> m (GBoolExp AnnSQLBoolExp)
+convBoolExp' cim tn spi be prepValBuilder = do
+ abe <- annBoolExp prepValBuilder cim be
+ modABE <- checkSelPerm spi abe
+ convBoolRhs binStrat (S.mkQual tn) modABE
+ where
+ binStrat = mkBoolExpBuilder return
+
+dmlTxErrorHandler :: Q.PGTxErr -> QErr
+dmlTxErrorHandler p2Res =
+ case err of
+ Nothing -> defaultTxErrorHandler p2Res
+ Just msg -> err400 PostgresError msg
+ where err = simplifyError p2Res
+
+-- | col_name as col_name
+mkColExtr :: (PGCol, PGColType) -> S.Extractor
+mkColExtr (c, pct) =
+ mkColExtrAl (Just c) (c, pct)
+
+mkColExtrAl :: (IsIden a) => Maybe a -> (PGCol, PGColType) -> S.Extractor
+mkColExtrAl alM colInfo =
+ S.mkAliasedExtrFromExp (mkColExp colInfo) alM
+
+mkColExp :: (PGCol, PGColType) -> S.SQLExp
+mkColExp (c, pct) =
+ if pct == PGGeometry || pct == PGGeography
+ then
+ (S.SEFnApp "ST_AsGeoJSON" [S.mkSIdenExp c] Nothing) `S.SETyAnn` "json"
+ else S.mkSIdenExp c
+
+-- validate headers
+validateHeaders :: (P1C m) => [T.Text] -> m ()
+validateHeaders depHeaders = do
+ headers <- map fst . userHeaders <$> askUserInfo
+ forM_ depHeaders $ \hdr ->
+ unless (hdr `elem` map T.toLower headers) $
+ throw400 NotFound $ hdr <<> " header is expected but not found"
+
+simplifyError :: Q.PGTxErr -> Maybe T.Text
+simplifyError txErr = do
+ stmtErr <- Q.getPGStmtErr txErr
+ codeMsg <- getPGCodeMsg stmtErr
+ extractError codeMsg
+ where
+ getPGCodeMsg pged =
+ (,) <$> Q.edStatusCode pged <*> Q.edMessage pged
+ extractError = \case
+ -- restrict violation
+ ("23501", msg) ->
+ return $ "Can not delete or update due to data being referred. " <> msg
+ -- not null violation
+ ("23502", msg) ->
+ return $ "Not-NULL violation. " <> msg
+ -- foreign key violation
+ ("23503", msg) ->
+ return $ "Foreign key violation. " <> msg
+ -- unique violation
+ ("23505", msg) ->
+ return $ "Uniqueness violation. " <> msg
+ -- check violation
+ ("23514", msg) ->
+ return $ "Check constraint violation. " <> msg
+ -- invalid text representation
+ ("22P02", msg) -> return msg
+ -- no unique constraint on the columns
+ ("42P10", _) ->
+ return "there is no unique or exclusion constraint on target column(s)"
+ -- no constraint
+ ("42704", msg) -> return msg
+ -- invalid parameter value
+ ("22023", msg) -> return msg
+ _ -> Nothing
diff --git a/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs b/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs
new file mode 100644
index 00000000..fc5e17b0
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/QueryTemplate.hs
@@ -0,0 +1,134 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.QueryTemplate where
+
+import Hasura.RQL.DDL.QueryTemplate
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.DML.Returning (encodeJSONVector)
+import Hasura.RQL.GBoolExp (txtRHSBuilder)
+import Hasura.RQL.Instances ()
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.RQL.DML.Count as R
+import qualified Hasura.RQL.DML.Delete as R
+import qualified Hasura.RQL.DML.Insert as R
+import qualified Hasura.RQL.DML.Select as R
+import qualified Hasura.RQL.DML.Update as R
+import qualified Hasura.SQL.DML as S
+
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Data.Aeson.Types
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.HashMap.Strict as M
+import qualified Data.Sequence as DS
+import qualified Data.Vector as V
+
+type TemplateArgs = M.HashMap TemplateParam Value
+
+data ExecQueryTemplate
+ = ExecQueryTemplate
+ { eqtName :: !TQueryName
+ , eqtArgs :: !TemplateArgs
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ExecQueryTemplate)
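+
+-- Illustrative only (not part of the original change): an
+-- ExecQueryTemplate value decodes from
+--   {"name": "article_by_id", "args": {"id": 1}}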
+
+type EQTP1 = StateT (DS.Seq Q.PrepArg) P1
+
+getParamValue
+ :: TemplateArgs
+ -> TemplateParamConf
+ -> EQTP1 Value
+getParamValue params (TemplateParamConf paramName paramVal) =
+ maybe paramMissing return $ M.lookup paramName params <|> paramVal
+ where
+ paramMissing = throw400 InvalidParams $
+ "missing parameter : " <>> paramName
+
+data QueryTProc
+ = QTPInsert !(R.InsertQueryP1, DS.Seq Q.PrepArg)
+ | QTPSelect !(R.SelectData, DS.Seq Q.PrepArg)
+ | QTPUpdate !(R.UpdateQueryP1, DS.Seq Q.PrepArg)
+ | QTPDelete !(R.DeleteQueryP1, DS.Seq Q.PrepArg)
+ | QTPCount !(R.CountQueryP1, DS.Seq Q.PrepArg)
+ | QTPBulk ![QueryTProc]
+ deriving (Show, Eq)
+
+buildPrepArg
+ :: TemplateArgs
+ -> PGColType
+ -> Value
+ -> EQTP1 S.SQLExp
+buildPrepArg args pct val =
+ case val of
+ Object _ -> do
+ tpc <- decodeValue val
+ v <- getParamValue args tpc
+ modifyErr (withParamErrMsg tpc) $ binRHSBuilder pct v
+ _ -> txtRHSBuilder pct val
+ where
+ withParamErrMsg tpc t =
+ "when processing parameter " <> tpcParam tpc <<> " : " <> t
+
+convQT
+ :: (P1C m)
+ => TemplateArgs
+ -> QueryT
+ -> m QueryTProc
+convQT args qt = case qt of
+ QTInsert q -> fmap QTPInsert $ peelSt $
+ R.convInsertQuery decodeParam binRHSBuilder q
+ QTSelect q -> fmap QTPSelect $ peelSt $ R.convSelectQuery f q
+ QTUpdate q -> fmap QTPUpdate $ peelSt $ R.convUpdateQuery f q
+ QTDelete q -> fmap QTPDelete $ peelSt $ R.convDeleteQuery f q
+ QTCount q -> fmap QTPCount $ peelSt $ R.countP1 f q
+ QTBulk q -> fmap QTPBulk $ mapM (convQT args) q
+ where
+ decodeParam val = do
+ tpc <- decodeValue val
+ v <- getParamValue args tpc
+ R.decodeInsObjs v
+
+ f = buildPrepArg args
+ peelSt m = do
+ sc <- askSchemaCache
+ ui <- askUserInfo
+ liftEither $ runP1 (QCtx ui sc) $ runStateT m DS.empty
+
+execQueryTemplateP1 :: ExecQueryTemplate -> P1 QueryTProc
+execQueryTemplateP1 (ExecQueryTemplate qtn args) = do
+ (QueryTemplateInfo _ qt _) <- askQTemplateInfo qtn
+ convQT args qt
+
+execQueryTP2 :: (P2C m) => QueryTProc -> m RespBody
+execQueryTP2 qtProc = case qtProc of
+ QTPInsert qp -> liftTx $ R.insertP2 qp
+ QTPSelect qp -> liftTx $ R.selectP2 qp
+ QTPUpdate qp -> liftTx $ R.updateP2 qp
+ QTPDelete qp -> liftTx $ R.deleteP2 qp
+ QTPCount qp -> R.countP2 qp
+ QTPBulk qps -> do
+ respList <- mapM execQueryTP2 qps
+ let bsVector = V.fromList respList
+ return $ BB.toLazyByteString $ encodeJSONVector BB.lazyByteString bsVector
+
+instance HDBQuery ExecQueryTemplate where
+
+ type Phase1Res ExecQueryTemplate = QueryTProc
+ phaseOne = execQueryTemplateP1
+
+ phaseTwo _ = execQueryTP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DML/Returning.hs b/server/src-lib/Hasura/RQL/DML/Returning.hs
new file mode 100644
index 00000000..989ffd9b
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Returning.hs
@@ -0,0 +1,168 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.RQL.DML.Returning where
+
+import Hasura.RQL.DML.Internal
+import Hasura.Prelude
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.HashMap.Strict as Map
+import qualified Data.Text as T
+import qualified Data.Vector as V
+import qualified Hasura.SQL.DML as S
+
+data RetFld
+ = RExp !T.Text
+ | RCol (PGCol, PGColType)
+ deriving (Show, Eq)
+
+pgColsFromRetFld :: RetFld -> Maybe (PGCol, PGColType)
+pgColsFromRetFld = \case
+ RExp _ -> Nothing
+ RCol c -> Just c
+
+type RetFlds = Map.HashMap T.Text RetFld
+
+mkRetFlds :: [(PGCol, PGColType)] -> RetFlds
+mkRetFlds flds =
+ Map.fromList $ flip map flds $
+ \(c, ty) -> (getPGColTxt c, RCol (c, ty))
+
+mkRetFldsExp :: RetFlds -> S.SQLExp
+mkRetFldsExp retFlds =
+ S.mkRowExp $ flip map (Map.toList retFlds) $ \(k, retFld) ->
+ case retFld of
+ RExp t -> (k, S.SELit t)
+ RCol colInfo -> (k, mkColExp colInfo)
+
+data MutFld
+ = MCount
+ | MExp !T.Text
+ | MRet !RetFlds
+ deriving (Show, Eq)
+
+type MutFlds = Map.HashMap T.Text MutFld
+
+pgColsFromMutFld :: MutFld -> [(PGCol, PGColType)]
+pgColsFromMutFld = \case
+ MCount -> []
+ MExp _ -> []
+ MRet retFlds -> mapMaybe pgColsFromRetFld $ Map.elems retFlds
+
+pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColType)]
+pgColsFromMutFlds = concatMap pgColsFromMutFld . Map.elems
+
+mkDefaultMutFlds :: Maybe [(PGCol, PGColType)] -> MutFlds
+mkDefaultMutFlds = \case
+ Nothing -> mutFlds
+ Just cols -> Map.insert "returning" (MRet $ mkRetFlds cols) mutFlds
+ where
+ mutFlds = Map.singleton "affected_rows" MCount
+
+mkMutFldExp :: MutFld -> S.SQLExp
+mkMutFldExp = \case
+ MCount -> S.SEUnsafe "count(*)"
+ MExp t -> S.SELit t
+ MRet retFlds -> S.SEFnApp "json_agg" [mkRetFldsExp retFlds] Nothing
+
+mkSelWith :: S.CTE -> MutFlds -> S.SelectWith
+mkSelWith cte mutFlds =
+ S.SelectWith [(alias, cte)] sel
+ where
+ alias = S.Alias $ toIden tableNameAlias
+ tableNameAlias = TableName "r"
+ sel = S.mkSelect { S.selExtr = [S.Extractor extrExp Nothing]
+ , S.selFrom = Just $ S.mkIdenFromExp tableNameAlias}
+
+ extrExp = S.SEFnApp "json_build_object" jsonBuildObjArgs Nothing
+
+ jsonBuildObjArgs =
+ flip concatMap (Map.toList mutFlds) $
+ \(k, mutFld) -> [S.SELit k, mkMutFldExp mutFld]
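+
+-- Illustrative only (not part of the original change): the statement
+-- built by mkSelWith has the shape
+--   WITH "r" AS (<INSERT/UPDATE/DELETE ... RETURNING *>)
+--   SELECT json_build_object('affected_rows', count(*), ...) FROM "r"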
+
+encodeJSONVector :: (a -> BB.Builder) -> V.Vector a -> BB.Builder
+encodeJSONVector builder xs
+ | V.null xs = BB.char7 '[' <> BB.char7 ']'
+ | otherwise = BB.char7 '[' <> builder (V.unsafeHead xs) <>
+ V.foldr go (BB.char7 ']') (V.unsafeTail xs)
+ where go v b = BB.char7 ',' <> builder v <> b
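+
+-- Illustrative only (not part of the original change):
+--   BB.toLazyByteString (encodeJSONVector BB.intDec (V.fromList [1,2,3]))
+-- evaluates to "[1,2,3]".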
+
+-- newtype RetRes = RetRes { getRetRes :: BL.ByteString }
+
+-- instance Q.FromRes RetRes where
+-- fromRes (Q.ResultOkEmpty _) =
+-- throwError "Expecting data. Instead, status is 'CommandOk'"
+
+-- fromRes (Q.ResultOkData pqRes) = do
+-- nc <- liftIO $ PQ.nfields pqRes
+
+-- -- We are only expecting tuples with single element
+-- unless (nc == 1) $
+-- throwError "select is expecting only 1 column in the result"
+
+-- -- Now get the number of rows
+-- nr <- liftIO $ PQ.ntuples pqRes
+
+-- -- comma separated value bulider
+-- valsBB <- rowLoop nr (getValue pqRes)
+
+-- return $ RetRes $ BB.toLazyByteString $ (BB.char7 '[' <> valsBB <> BB.char7 ']')
+
+-- getValue :: PQ.Result -> PQ.Row -> ExceptT String IO BB.Builder
+-- getValue res i = do
+-- bs <- liftIO $ PQ.getvalue res i 0
+-- case bs of
+-- Just bs' -> return $ BB.byteString bs'
+-- Nothing -> throwError "null encountered when processing select result"
+
+-- rowLoop :: PQ.Row -> (PQ.Row -> ExceptT String IO BB.Builder)
+-- -> ExceptT String IO BB.Builder
+-- rowLoop n f = loop (n - 1) mempty
+-- where
+-- loop !i !accum
+-- | i < 0 = return accum
+-- | i == 0 = do
+-- a <- f 0
+-- return (a <> accum)
+-- | otherwise = do
+-- a <- f i
+-- loop (i-1) (BB.char7 ',' <> a <> accum)
+
+checkRetCols
+ :: (P1C m)
+ => FieldInfoMap
+ -> SelPermInfo
+ -> [PGCol]
+ -> m [PGColType]
+checkRetCols fieldInfoMap selPermInfo cols = do
+ mapM_ (checkSelOnCol selPermInfo) cols
+ forM cols $ \col -> askPGType fieldInfoMap col relInRetErr
+ where
+ relInRetErr = "Relationships can't be used in \"returning\"."
+
+-- | Converts the given columns into :
+-- RETURNING row_to_json((SELECT r FROM (SELECT col1, col2, .. ) AS r))
+-- toRetExp :: [(PGCol, PGColType)] -> S.RetExp
+-- toRetExp cols =
+-- S.RetExp [S.Extractor jsonAgg Nothing]
+-- where
+-- -- row_to_json((SELECT r FROM (SELECT col1, col2, .. ) AS r))
+-- jsonAgg = S.SEFnApp "row_to_json" [S.mkRowExp $ map mkColExtr cols] Nothing
+
+-- encodeReturning :: Q.WithReturning (V.Vector (Identity BS.ByteString))
+-- -> BB.Builder
+-- encodeReturning (Q.WithReturning c mval) =
+-- BB.byteString "{\"affected_rows\":" <>
+-- BB.word64Dec c <> retBuilder <> BB.char7 '}'
+-- where
+-- retBuilder =
+-- case mval of
+-- Just ret ->
+-- BB.byteString ",\"returning\":" <>
+-- encodeJSONVector (BB.byteString . runIdentity) ret
+-- Nothing -> mempty
diff --git a/server/src-lib/Hasura/RQL/DML/Select.hs b/server/src-lib/Hasura/RQL/DML/Select.hs
new file mode 100644
index 00000000..82df197c
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Select.hs
@@ -0,0 +1,693 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Select where
+
+import Data.Aeson.Types
+import Data.List (unionBy)
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.HashMap.Strict as HM
+import qualified Data.HashSet as HS
+import qualified Data.Sequence as DS
+import qualified Data.Text as T
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+-- Conversion of SelectQ happens in 2 Stages.
+-- Stage 1 : Convert input query into an annotated AST
+-- Stage 2 : Convert annotated AST to SQL Select
+
+type SelectQExt = SelectG ExtCol BoolExp
+-- Columns in RQL
+data ExtCol
+ = ECSimple !PGCol
+ | ECRel !RelName !(Maybe RelName) !SelectQExt
+ deriving (Show, Eq, Lift)
+
+instance ToJSON ExtCol where
+ toJSON (ECSimple s) = toJSON s
+ toJSON (ECRel rn mrn selq) =
+ object $ [ "name" .= rn
+ , "alias" .= mrn
+ ] ++ selectGToPairs selq
+
+instance FromJSON ExtCol where
+ parseJSON v@(Object o) =
+ ECRel
+ <$> o .: "name"
+ <*> o .:? "alias"
+ <*> parseJSON v
+ parseJSON (String s) =
+ return $ ECSimple $ PGCol s
+ parseJSON _ =
+ fail $ mconcat
+ [ "A column should either be a string or an "
+ , "object (relationship)"
+ ]
+
+data AnnRel = AnnRel
+ { arName :: !RelName -- Relationship name
+ , arType :: !RelType -- Relationship type (ObjRel, ArrRel)
+  , arMapping :: ![(PGCol, PGCol)] -- Columns of the left table mapped to the joining columns of the right table
+  , arSelData :: !SelectData -- Select data of the relationship's table; roughly a SQL Select
+ } deriving (Show, Eq)
+
+data SelectData = SelectData
+ -- Nested annotated columns
+ { sdFlds :: !(HM.HashMap FieldName AnnFld)
+ , sdTable :: !QualifiedTable -- Postgres table name
+ , sdWhere :: !(S.BoolExp, Maybe (GBoolExp AnnSQLBoolExp))
+ , sdOrderBy :: !(Maybe S.OrderByExp)
+ , sdAddCols :: ![PGCol] -- additional order by columns
+ , sdLimit :: !(Maybe S.SQLExp)
+ , sdOffset :: !(Maybe S.SQLExp)
+ } deriving (Show, Eq)
+
+convSelCol :: (P1C m)
+ => FieldInfoMap
+ -> SelPermInfo
+ -> SelCol
+ -> m [ExtCol]
+convSelCol _ _ (SCExtSimple cn) =
+ return [ECSimple cn]
+convSelCol fieldInfoMap _ (SCExtRel rn malias selQ) = do
+ -- Point to the name key
+ let pgWhenRelErr = "only relationships can be expanded"
+ relInfo <- withPathK "name" $
+ askRelType fieldInfoMap rn pgWhenRelErr
+ let (RelInfo _ _ _ relTab _) = relInfo
+ (rfim, rspi) <- fetchRelDet rn relTab
+ resolvedSelQ <- resolveStar rfim rspi selQ
+ return [ECRel rn malias resolvedSelQ]
+convSelCol fieldInfoMap spi (SCStar wildcard) =
+ convWildcard fieldInfoMap spi wildcard
+
+convWildcard
+ :: (P1C m)
+ => FieldInfoMap
+ -> SelPermInfo
+ -> Wildcard
+ -> m [ExtCol]
+convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _) wildcard =
+ case wildcard of
+ Star -> return simpleCols
+ (StarDot wc) -> (simpleCols ++) <$> (catMaybes <$> relExtCols wc)
+ where
+ (pgCols, relColInfos) = partitionFieldInfosWith (pgiName, id) $
+ HM.elems fieldInfoMap
+
+ simpleCols = map ECSimple $ filter (`HS.member` cols) pgCols
+
+ mkRelCol wc relInfo = do
+ let relName = riName relInfo
+ relTab = riRTable relInfo
+ relTabInfo <- fetchRelTabInfo relTab
+ mRelSelPerm <- askPermInfo' PASelect relTabInfo
+
+ case mRelSelPerm of
+ Nothing -> return Nothing
+ Just rspi -> do
+ rExtCols <- convWildcard (tiFieldInfoMap relTabInfo) rspi wc
+ return $ Just $ ECRel relName Nothing $
+ SelectG rExtCols Nothing Nothing Nothing Nothing
+
+ relExtCols wc = mapM (mkRelCol wc) relColInfos
+
+resolveStar :: (P1C m)
+ => FieldInfoMap
+ -> SelPermInfo
+ -> SelectQ
+ -> m SelectQExt
+resolveStar fim spi (SelectG selCols mWh mOb mLt mOf) = do
+ procOverrides <- fmap (concat . catMaybes) $ withPathK "columns" $
+ indexedForM selCols $ \selCol -> case selCol of
+ (SCStar _) -> return Nothing
+ _ -> Just <$> convSelCol fim spi selCol
+ everything <- case wildcards of
+ [] -> return []
+ _ -> convWildcard fim spi $ maximum wildcards
+ let extCols = unionBy equals procOverrides everything
+ return $ SelectG extCols mWh mOb mLt mOf
+ where
+ wildcards = lefts $ map mkEither selCols
+
+ mkEither (SCStar wc) = Left wc
+ mkEither selCol = Right selCol
+
+ equals (ECSimple x) (ECSimple y) = x == y
+ equals (ECRel x _ _) (ECRel y _ _) = x == y
+ equals _ _ = False
+
+data AnnFld
+ = FCol (PGCol, PGColType)
+ | FRel AnnRel
+ | FExp T.Text
+ deriving (Show, Eq)
+
+partAnnFlds
+ :: [AnnFld] -> ([(PGCol, PGColType)], [AnnRel])
+partAnnFlds flds =
+ partitionEithers $ catMaybes $ flip map flds $ \case
+ FCol c -> Just $ Left c
+ FRel r -> Just $ Right r
+ FExp _ -> Nothing
+
+
+processOrderByElem
+ :: (P1C m)
+ => HM.HashMap FieldName AnnFld
+ -> [T.Text]
+ -> m (HM.HashMap FieldName AnnFld)
+processOrderByElem _ [] =
+ withPathK "column" $ throw400 UnexpectedPayload "can't be empty"
+processOrderByElem annFlds [colTxt] =
+ case HM.lookup (FieldName colTxt) annFlds of
+ Just (FCol (_, ty)) -> if ty == PGGeography || ty == PGGeometry
+ then throw400 UnexpectedPayload $ mconcat
+      [ (PGCol colTxt) <<> " has type 'geometry' or 'geography'"
+ , " and cannot be used in order_by"
+ ]
+ else return annFlds
+ Just (FRel _) -> throw400 UnexpectedPayload $ mconcat
+ [ (PGCol colTxt) <<> " is a"
+ , " relationship and should be expanded"
+ ]
+ Just (FExp t) -> throw500 $
+ " found __typename in order_by?: " <> t
+ Nothing -> throw400 UnexpectedPayload $ mconcat
+ [ (PGCol colTxt) <<> " should be"
+ , " included in 'columns'"
+ ]
+processOrderByElem annFlds (colTxt:xs) =
+ case HM.lookup (FieldName colTxt) annFlds of
+ Just (FRel annRel) -> case arType annRel of
+ ObjRel -> do
+ let relSelData = arSelData annRel
+ relFlds = sdFlds relSelData
+ newRelFlds <- processOrderByElem relFlds xs
+ let newRelSelData = relSelData
+ { sdAddCols = (PGCol $ T.intercalate "__" xs):(sdAddCols relSelData)
+ , sdFlds = newRelFlds
+ }
+ newAnnRel = annRel { arSelData = newRelSelData }
+ return $ HM.insert (FieldName colTxt) (FRel newAnnRel) annFlds
+ ArrRel ->
+ throw400 UnexpectedPayload $ mconcat
+ [ (RelName colTxt) <<> " is an array relationship"
+ ," and can't be used in 'order_by'"
+ ]
+ Just (FCol _) -> throw400 UnexpectedPayload $ mconcat
+ [ (PGCol colTxt) <<> " is a Postgres column"
+ , " and cannot be chained further"
+ ]
+ Just (FExp t) -> throw500 $
+ " found __typename in order_by?: " <> t
+ Nothing -> throw400 UnexpectedPayload $ mconcat
+ [ (PGCol colTxt) <<> " should be"
+ , " included in 'columns'"
+ ]
+
+convOrderByItem :: OrderByItem -> S.OrderByItem
+convOrderByItem (OrderByItem ot (OrderByCol path) nulls) =
+ S.OrderByItem obiExp ot nulls
+ where
+ obiExp = Left $ PGCol $ T.intercalate "__" path
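+-- e.g. the path ["author", "name"] is rendered as the single column
+-- "author__name", matching the alias built by mkOrderByColExtr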
+
+convOrderByExp
+ :: (P1C m)
+ => OrderByExp
+ -> m S.OrderByExp
+convOrderByExp (OrderByExp obItems) = do
+ when (null obItems) $ throw400 UnexpectedPayload
+ "order_by array should not be empty"
+ return $
+ S.OrderByExp $ map convOrderByItem obItems
+
+partitionExtCols :: [ExtCol]
+ -> ([PGCol], [(RelName, Maybe RelName, SelectQExt)])
+partitionExtCols = foldr f ([], [])
+ where
+ f (ECSimple pgCol) ~(l, r) = (pgCol:l, r)
+ f (ECRel relName mAlias selQ) ~(l, r) = (l, (relName, mAlias, selQ):r)
+
+convSelectQ
+ :: (P1C m)
+ => FieldInfoMap -- Table information of current table
+ -> SelPermInfo -- Additional select permission info
+ -> SelectQExt -- Given Select Query
+ -> (PGColType -> Value -> m S.SQLExp)
+ -> m SelectData
+convSelectQ fieldInfoMap selPermInfo selQ prepValBuilder = do
+ -- let (extPGCols, extRels) = partitionExtCols $ sqColumns selQ
+
+ annFlds <- fmap HM.fromList $ withPathK "columns" $
+ indexedForM (sqColumns selQ) $ \case
+ (ECSimple pgCol) -> do
+ colTy <- convExtSimple fieldInfoMap selPermInfo pgCol
+ return (fromPGCol pgCol, FCol (pgCol, colTy))
+ (ECRel relName mAlias relSelQ) -> do
+ annRel <- convExtRel fieldInfoMap relName mAlias relSelQ prepValBuilder
+ return (fromRel $ fromMaybe relName mAlias, FRel annRel)
+
+ -- pgColTypes <- withPathK "columns" $
+ -- indexedForM extPGCols $ \extCol ->
+ -- convExtSimple fieldInfoMap selPermInfo extCol
+
+ -- let pgColMap = HM.fromList $ zip extPGCols pgColTypes
+
+ -- annRels <- withPathK "columns" $
+ -- indexedForM extRels $ \(relName, mAlias, extCol) -> do
+
+ -- let annRelMap = HM.fromList annRels
+ let spiT = spiTable selPermInfo
+
+ -- Convert where clause
+ wClause <- forM (sqWhere selQ) $ \be ->
+ withPathK "where" $
+ convBoolExp' fieldInfoMap spiT selPermInfo be prepValBuilder
+
+ newAnnFldsM <- forM (sqOrderBy selQ) $ \(OrderByExp obItems) ->
+ withPathK "order_by" $
+ indexedFoldM processOrderByElem annFlds $
+ map (getOrderByColPath . obiColumn) obItems
+
+ let newAnnFlds = fromMaybe annFlds newAnnFldsM
+
+ -- Convert order by
+ sqlOrderBy <- mapM convOrderByExp $ sqOrderBy selQ
+
+ -- convert limit expression
+ limitExp <- mapM (prepValBuilder PGBigInt) $ sqLimit selQ
+
+  -- convert offset value
+ offsetExp <- mapM (prepValBuilder PGBigInt) $ sqOffset selQ
+
+ return $ SelectData newAnnFlds (spiTable selPermInfo)
+ (spiFilter selPermInfo, wClause) sqlOrderBy [] limitExp offsetExp
+
+convExtSimple
+ :: (P1C m)
+ => FieldInfoMap
+ -> SelPermInfo
+ -> PGCol
+ -> m PGColType
+convExtSimple fieldInfoMap selPermInfo pgCol = do
+ checkSelOnCol selPermInfo pgCol
+ askPGType fieldInfoMap pgCol relWhenPGErr
+ where
+ relWhenPGErr = "relationships have to be expanded"
+
+convExtRel
+ :: (P1C m)
+ => FieldInfoMap
+ -> RelName
+ -> Maybe RelName
+ -> SelectQExt
+ -> (PGColType -> Value -> m S.SQLExp)
+ -> m AnnRel
+convExtRel fieldInfoMap relName mAlias selQ prepValBuilder = do
+ -- Point to the name key
+ relInfo <- withPathK "name" $
+ askRelType fieldInfoMap relName pgWhenRelErr
+ let (RelInfo _ relTy colMapping relTab _) = relInfo
+ (relCIM, relSPI) <- fetchRelDet relName relTab
+ selectData <- case relTy of
+ ObjRel ->
+ if misused
+ then throw400 UnexpectedPayload $ mconcat
+ [ "when selecting an 'obj_relationship' "
+ , "'where', 'order_by', 'limit' and 'offset' "
+ , " can't be used"
+ ]
+ else convSelectQ relCIM relSPI selQ prepValBuilder
+ ArrRel -> convSelectQ relCIM relSPI selQ prepValBuilder
+ return $ AnnRel (fromMaybe relName mAlias) relTy colMapping selectData
+ where
+ pgWhenRelErr = "only relationships can be expanded"
+ misused = or [ isJust (sqWhere selQ)
+ , isJust (sqLimit selQ)
+ , isJust (sqOffset selQ)
+ , isJust (sqOrderBy selQ)
+ ]
+
+-- SQL Generation helper functions
+----------------------------------
+
+-- | Lateral joins are different. For example
+-- A typical join looks like :
+-- FromExp1 JOIN FromExp2 ON (condition)
+--
+-- A lateral join is as follows :
+-- FromExp1 LATERAL JOIN FromExp2' ON (true)
+-- where condition exists inside FromExp2'
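+--
+-- e.g. (illustrative SQL only) the join built below has the shape :
+--
+--   ... FROM (SELECT ...) AS l
+--   LEFT OUTER JOIN LATERAL (SELECT ... WHERE <join cond> AND ...) AS r
+--   ON ('true')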
+
+joinSel :: S.Select -- ^ left Select expression
+ -> S.Select -- ^ right Select expression
+ -> S.FromExp -- ^ From expression
+joinSel leftSel rightSel =
+ S.FromExp [S.FIJoin $ S.JoinExpr lhsFI S.LeftOuter rhsFI joinCond]
+ where
+ lhsFI = S.mkSelFromExp False leftSel $ TableName "l"
+ rhsFI = S.mkSelFromExp True rightSel $ TableName "r"
+ joinCond = S.JoinOn $ S.BELit True
+
+-- | Injects lateral join condition into given Select expression
+
+injectJoinCond :: S.BoolExp -- ^ Join condition
+ -> S.BoolExp -- ^ Where condition
+ -> S.WhereFrag -- ^ New where frag
+injectJoinCond joinCond whereCond =
+ S.WhereFrag $ S.BEBin S.AndOp joinCond whereCond
+
+mkJoinCond :: AnnRel -> S.BoolExp
+mkJoinCond annRel =
+ foldr (S.BEBin S.AndOp) (S.BELit True) $ flip map colMapping $
+ \(lCol, rCol) -> S.BECompare S.SEQ (mkLJColFn lCol) (S.mkSIdenExp rCol)
+ where
+ colMapping = arMapping annRel
+ mkLJColFn = S.mkQIdenExp (TableName "l") . mkLJCol (arName annRel)
+
+-- | Generates SQL Exp of form
+--
+-- fn_name((SELECT r FROM (SELECT ext1, ext2 ..) as r))
+-- | |--------------------------|
+-- | | inner select |
+-- |-----------------------------------------|
+-- | outer select |
+--
+-- This is needed because
+--
+-- row_to_json(col1, col2)
+--
+-- would result in
+--
+-- { "f1" : v1, "f2" : v2 }
+--
+-- But,
+--
+-- row_to_json((SELECT r FROM (SELECT col1, col2) as r))
+--
+-- would result in
+--
+-- { "col1" : v1, "col2" : v2 }
+
+mkInnerSelExtr :: (FieldName, AnnFld) -> (T.Text, S.SQLExp)
+mkInnerSelExtr (alias, annFld) =
+ (getFieldNameTxt alias, colExp)
+ where
+ colExp = case annFld of
+ FCol (pgCol, _) -> S.mkQIdenExp (TableName "r") pgCol
+ FRel annRel -> S.mkQIdenExp (TableName "r") $ arName annRel
+ FExp t -> S.SELit t
+
+mkLJCol :: RelName -> PGCol -> PGCol
+mkLJCol (RelName rTxt) (PGCol cTxt) =
+ PGCol ("__l_" <> rTxt <> "_" <> cTxt)
+
+-- | Generates
+--
+-- IF (r.__r_col IS NULL) THEN 'null' ELSE row_to_json(..)
+mkObjRelExtr :: PGCol -> RelName -> [(T.Text, S.SQLExp)] -> S.Extractor
+mkObjRelExtr compCol relName flds =
+ let idCol = S.mkQIdenExp (TableName "r") compCol
+ rowExp = S.mkRowExp flds
+ -- objAgg = S.SEFnApp "row_to_json" [rowExp] Nothing
+ condExp = S.SECond (S.BENull idCol) (S.SELit "null") rowExp
+ in S.mkAliasedExtrFromExp condExp $ Just relName
+
+-- | Generates
+--
+-- IF (first(r.__r_col) IS NULL) THEN '[]' ELSE json_agg(..)
+mkArrRelExtr
+ :: (Maybe S.OrderByExp) -> PGCol
+ -> RelName -> [(T.Text, S.SQLExp)] -> S.Extractor
+mkArrRelExtr mOb compCol relName flds =
+ let refCol = S.SEFnApp "hdb_catalog.first"
+ [ S.mkQIdenExp (TableName "r") compCol ] Nothing
+ rowExp = S.mkRowExp flds
+ arrAgg = S.SEFnApp "json_agg" [rowExp] mOb
+ condExp = S.SECond (S.BENull refCol) (S.SELit "[]") arrAgg
+ in S.mkAliasedExtrFromExp condExp $ Just relName
+
+-- | Make an extractor for an order by column of a relationship
+mkOrderByColExtr :: RelName -> PGCol -> S.Extractor
+mkOrderByColExtr (RelName rTxt) t@(PGCol cTxt) =
+ S.mkAliasedExtrFromExp orderByCol $ Just alias
+ where
+ orderByCol = S.mkQIdenExp (TableName "r") t
+    alias = PGCol (rTxt <> "__" <> cTxt)
+
+-- | Extractors that expose the left-side join columns of a relationship
+mkLColExtrs :: AnnRel -> [S.Extractor]
+mkLColExtrs ar =
+ map (\lCol -> S.mkAliasedExtr lCol $ Just $ mkLJCol relName lCol) lCols
+ where
+ lCols = map fst $ arMapping ar
+ relName = arName ar
+
+-- | Alias for the comparison column brought in from the right side
+mkCompColAlias :: RelName -> PGCol -> PGCol
+mkCompColAlias relName rCol =
+ PGCol ("__r_" <> getRelTxt relName <> "_" <> getPGColTxt rCol)
+  -- TODO : exception-prone; the mapping should be a non-empty list
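+-- e.g. mkCompColAlias (RelName "author") (PGCol "id") gives PGCol "__r_author_id"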
+
+selDataToSQL :: [S.Extractor] -- ^ Parent's RCol
+ -> S.BoolExp -- ^ Join Condition if any
+ -> SelectData -- ^ Select data
+ -> S.Select -- ^ SQL Select (needs wrapping)
+selDataToSQL parRCols joinCond (SelectData annFlds tn (fltr, mWc) ob _ lt offst) =
+ let
+ (sCols, relCols) = partAnnFlds $ HM.elems annFlds
+ -- relCols = HM.elems relColsMap
+ childrenLCols = concatMap mkLColExtrs relCols
+ thisTableExtrs = parRCols
+ <> map mkColExtr sCols
+ -- <> (map mkOrderByColExtr obeCols)
+ <> childrenLCols
+
+ finalWC = S.BEBin S.AndOp fltr $ maybe (S.BELit True) cBoolExp mWc
+
+ -- Add order by if
+ -- limit or offset is used or when no relationships are requested
+ -- orderByExp = bool Nothing ob $ or [isJust lt, isJust offst, null relCols]
+ baseSel = S.mkSelect
+ { S.selExtr = thisTableExtrs
+ , S.selFrom = Just $ S.mkSimpleFromExp tn
+ , S.selWhere = Just $ injectJoinCond joinCond finalWC
+ }
+ joinedSel = foldr ($) baseSel $ map annRelColToSQL relCols
+ in
+ joinedSel { S.selOrderBy = ob
+ , S.selLimit = S.LimitExp <$> lt
+ , S.selOffset = S.OffsetExp <$> offst
+ }
+
+-- | Brings the left select's columns into the scope of the outer select.
+-- If the outer select has a group by, each column is wrapped in
+-- 'hdb_catalog.first'; otherwise it is just qualified with "l".
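+--
+-- e.g. an extractor aliased "a" becomes either l."a" or
+-- hdb_catalog.first(l."a"), aliased back to "a" in both cases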
+exposeLSelExtrs :: Bool -- is group by on outer select?
+ -> [S.Extractor] -- left select's extractors
+ -> [S.Extractor] -- extrs that can be used in outer select
+exposeLSelExtrs isGrpBy lExtrs =
+  -- TODO : This looks error prone. We'll definitely have
+  -- aliased columns as extractors, but the type system doesn't
+  -- guarantee it. Fix this.
+ map exposeLCol $ mapMaybe S.getExtrAlias lExtrs
+ where
+ toQual = S.QualIden . toIden
+ exposeLCol al@(S.Alias lCol) =
+ let qLCol = S.SEQIden $ S.QIden (toQual (TableName "l")) lCol
+ faLCol = S.SEFnApp "hdb_catalog.first" [qLCol] Nothing
+ in S.Extractor (bool qLCol faLCol isGrpBy) $ Just al
+
+-- | Generates
+--
+-- SELECT
+-- cols_of_left_sel,
+-- relationship_extr
+-- FROM
+-- left_sel as l
+-- {JOIN TYPE} generated_right_sel_from_sel_data as r
+-- ON {JOIN COND}
+-- {GROUP BY}?
+annRelColToSQL :: AnnRel
+ -> S.Select
+ -> S.Select
+annRelColToSQL ar leftSel =
+ let
+ selData = arSelData ar
+ relName = arName ar
+ joinCond = mkJoinCond ar
+    -- The column used to determine whether the object is null
+    -- or the array is empty
+ compCol = snd $ head $ arMapping ar
+ -- An alias for this
+ compColAlias = mkCompColAlias relName compCol
+ -- the comparison column should also be selected
+ rightSel = selDataToSQL [S.mkAliasedExtr compCol $ Just compColAlias] joinCond selData
+
+ allFlds = map mkInnerSelExtr (HM.toList $ sdFlds selData)
+ -- <> map mkInnerSelExtr (HM.keys $ sdRels selData)
+ -- Lateral joins left and right select
+ fromExp = joinSel leftSel rightSel
+ in case arType ar of
+ ObjRel ->
+ let
+ -- Current relationship's extractor, using row_to_json
+ relExtr = mkObjRelExtr compColAlias relName allFlds
+
+ -- Qualified left select's columns
+ qLSelCols = exposeLSelExtrs False $ S.selExtr leftSel
+
+ -- Relationship's order columns
+ relOrderByCols = map (mkOrderByColExtr relName) $ sdAddCols selData
+
+ in
+ S.mkSelect { S.selExtr = qLSelCols ++ relExtr:relOrderByCols
+ , S.selFrom = Just fromExp
+ }
+ ArrRel ->
+ let
+ -- Current relationship's extractor, using json_agg
+ -- Also add order by in the aggregation as postgres doesn't guarantee it
+ relExtr = mkArrRelExtr (qualifyOrderBy <$> sdOrderBy selData) compColAlias relName allFlds
+
+ -- Firstified left select's columns
+ qLSelCols = exposeLSelExtrs True $ S.selExtr leftSel
+
+ -- Group by exp to aggregate relationship as json_array
+ grpByExp = S.GroupByExp $ map (S.mkQIdenExp (TableName "l") . mkLJCol relName) $
+ map fst $ arMapping ar
+ in
+ S.mkSelect { S.selExtr = relExtr:qLSelCols
+ , S.selFrom = Just fromExp
+ , S.selGroupBy = Just grpByExp
+ }
+ where
+ qualifyOrderByItem (S.OrderByItem e t n) =
+ let qe = case e of
+ Left c -> Right $ S.mkQIden (TableName "r") c
+              Right c -> Right c
+ in S.OrderByItem qe t n
+ qualifyOrderBy (S.OrderByExp items) =
+ S.OrderByExp $ map qualifyOrderByItem items
+
+-- wrapFinalSel :: S.Select -> [ExtCol] -> S.Select
+-- wrapFinalSel initSel extCols =
+-- S.mkSelect
+-- { S.selExtr = [S.Extractor rowToJSONedCol Nothing]
+-- , S.selFrom = Just $ S.FromExp [S.mkSelFromExp False initSel (TableName "r")]
+-- }
+-- where
+-- rowExp = S.mkRowExp $ map toExtr extCols
+-- rowToJSONedCol = S.SEFnApp "coalesce"
+-- [ S.SEFnApp "json_agg" [rowExp] Nothing
+-- , S.SELit "[]"] Nothing
+-- toExtr (ECSimple pgCol) =
+-- S.mkAliasedExtrFromExp (S.mkQIdenExp (TableName "r") pgCol) $
+-- Just pgCol
+-- toExtr (ECRel relName mAlias _) =
+-- let rName = fromMaybe relName mAlias
+-- in S.mkAliasedExtrFromExp (S.mkQIdenExp (TableName "r") rName) $
+-- Just rName
+
+wrapFinalSel :: S.Select -> [(FieldName, AnnFld)] -> S.Select
+wrapFinalSel initSel extCols =
+ S.mkSelect
+ { S.selExtr = [S.Extractor rowToJSONedCol Nothing]
+ , S.selFrom = Just $ S.FromExp [S.mkSelFromExp False initSel (TableName "r")]
+ }
+ where
+ rowExp = S.mkRowExp $ map mkInnerSelExtr extCols
+ rowToJSONedCol = S.SEFnApp "coalesce"
+ [ S.SEFnApp "json_agg" [rowExp] Nothing
+ , S.SELit "[]"] Nothing
+
+getSelectDeps
+ :: SelectData
+ -> [SchemaDependency]
+getSelectDeps (SelectData flds tn (_, annWc) _ _ _ _) =
+ mkParentDep tn
+ : fromMaybe [] whereDeps
+ <> colDeps
+ <> relDeps
+ <> nestedDeps
+ where
+ (sCols, rCols) = partAnnFlds $ HM.elems flds
+ colDeps = map (mkColDep "untyped" tn . fst) sCols
+ relDeps = map (mkRelDep . arName) rCols
+ nestedDeps = concatMap (getSelectDeps . arSelData) rCols
+ whereDeps = getBoolExpDeps tn <$> annWc
+ mkRelDep rn =
+ SchemaDependency (SOTableObj tn (TORel rn)) "untyped"
+
+-- data SelectQueryP1
+-- = SelectQueryP1
+-- { sqp1Cols :: ![ExtCol]
+-- , sqp1Data :: !SelectData
+-- } deriving (Show, Eq)
+
+-- mkSQLSelect :: SelectQueryP1 -> S.Select
+-- mkSQLSelect (SelectQueryP1 extCols selData) =
+-- wrapFinalSel (selDataToSQL [] (S.BELit True) selData) extCols
+
+mkSQLSelect :: SelectData -> S.Select
+mkSQLSelect selData =
+ wrapFinalSel (selDataToSQL [] (S.BELit True) selData) $
+ HM.toList $ sdFlds selData
+
+-- convSelectQuery
+-- :: (P1C m)
+-- => (PGColType -> Value -> m S.SQLExp)
+-- -> SelectQuery
+-- -> m SelectQueryP1
+-- convSelectQuery prepArgBuilder (DMLQuery qt selQ) = do
+-- tabInfo <- withPathK "table" $ askTabInfo qt
+-- selPermInfo <- askSelPermInfo tabInfo
+-- extSelQ <- resolveStar (tiFieldInfoMap tabInfo) selPermInfo selQ
+-- let extCols = sqColumns extSelQ
+-- selData <- convSelectQ (tiFieldInfoMap tabInfo) selPermInfo extSelQ prepArgBuilder
+-- return $ SelectQueryP1 extCols selData
+
+convSelectQuery
+ :: (P1C m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> SelectQuery
+ -> m SelectData
+convSelectQuery prepArgBuilder (DMLQuery qt selQ) = do
+ tabInfo <- withPathK "table" $ askTabInfo qt
+ selPermInfo <- askSelPermInfo tabInfo
+ extSelQ <- resolveStar (tiFieldInfoMap tabInfo) selPermInfo selQ
+ validateHeaders $ spiRequiredHeaders selPermInfo
+ convSelectQ (tiFieldInfoMap tabInfo) selPermInfo extSelQ prepArgBuilder
+
+-- selectP2 :: (P2C m) => (SelectQueryP1, DS.Seq Q.PrepArg) -> m RespBody
+selectP2 :: (SelectData, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
+selectP2 (sel, p) =
+ runIdentity . Q.getRow
+ <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder selectSQL) (toList p) True
+ where
+ selectSQL = toSQL $ mkSQLSelect sel
+
+instance HDBQuery SelectQuery where
+
+ -- type Phase1Res SelectQuery = (SelectQueryP1, DS.Seq Q.PrepArg)
+ type Phase1Res SelectQuery = (SelectData, DS.Seq Q.PrepArg)
+ phaseOne q = flip runStateT DS.empty $ convSelectQuery binRHSBuilder q
+
+ phaseTwo _ = liftTx . selectP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/DML/Update.hs b/server/src-lib/Hasura/RQL/DML/Update.hs
new file mode 100644
index 00000000..e3ad1a42
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/DML/Update.hs
@@ -0,0 +1,185 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.DML.Update where
+
+import Data.Aeson.Types
+import Instances.TH.Lift ()
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.Sequence as DS
+
+import Hasura.Prelude
+import Hasura.RQL.DML.Internal
+import Hasura.RQL.DML.Returning
+import Hasura.RQL.GBoolExp
+import Hasura.RQL.Instances ()
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Hasura.SQL.DML as S
+
+data UpdateQueryP1
+ = UpdateQueryP1
+ { uqp1Table :: !QualifiedTable
+ , uqp1SetExps :: ![(PGCol, S.SQLExp)]
+ , uqp1Where :: !(S.BoolExp, GBoolExp AnnSQLBoolExp)
+ , pqp1MutFlds :: !MutFlds
+ } deriving (Show, Eq)
+
+mkSQLUpdate
+ :: UpdateQueryP1 -> S.SelectWith
+mkSQLUpdate (UpdateQueryP1 tn setExps (permFltr, wc) mutFlds) =
+ mkSelWith (S.CTEUpdate update) mutFlds
+ where
+ update = S.SQLUpdate tn setExp Nothing tableFltr $ Just S.returningStar
+ setExp = S.SetExp $ map S.SetExpItem setExps
+ tableFltr = Just $ S.WhereFrag $ S.BEBin S.AndOp permFltr $ cBoolExp wc
+
+getUpdateDeps
+ :: UpdateQueryP1
+ -> [SchemaDependency]
+getUpdateDeps (UpdateQueryP1 tn setExps (_, wc) mutFlds) =
+ mkParentDep tn : colDeps <> whereDeps <> retDeps
+ where
+    colDeps = map (mkColDep "on_type" tn . fst) setExps
+ whereDeps = getBoolExpDeps tn wc
+ retDeps = map (mkColDep "untyped" tn . fst) $
+ pgColsFromMutFlds mutFlds
+
+convInc
+ :: (QErrM m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> PGCol
+ -> PGColType
+ -> Value
+ -> m (PGCol, S.SQLExp)
+convInc f col colType val = do
+ prepExp <- f colType val
+ return (col, S.SEOpApp "+" [S.mkSIdenExp col, prepExp])
+
+convMul
+ :: (QErrM m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> PGCol
+ -> PGColType
+ -> Value
+ -> m (PGCol, S.SQLExp)
+convMul f col colType val = do
+ prepExp <- f colType val
+ return (col, S.SEOpApp "*" [S.mkSIdenExp col, prepExp])
+
+convSet
+ :: (QErrM m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> PGCol
+ -> PGColType
+ -> Value
+ -> m (PGCol, S.SQLExp)
+convSet f col colType val = do
+ prepExp <- f colType val
+ return (col, prepExp)
+
+convDefault :: (Monad m) => PGCol -> PGColType -> () -> m (PGCol, S.SQLExp)
+convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT")
+
+convOp
+ :: (UserInfoM m, QErrM m)
+ => FieldInfoMap
+ -> UpdPermInfo
+ -> [(PGCol, a)]
+ -> (PGCol -> PGColType -> a -> m (PGCol, S.SQLExp))
+ -> m [(PGCol, S.SQLExp)]
+convOp fieldInfoMap updPerm objs conv =
+ forM objs $ \(pgCol, a) -> do
+ checkPermOnCol PTUpdate allowedCols pgCol
+ colType <- askPGType fieldInfoMap pgCol relWhenPgErr
+ res <- conv pgCol colType a
+ -- build a set expression's entry
+ withPathK (getPGColTxt pgCol) $ return res
+ where
+ allowedCols = upiCols updPerm
+ relWhenPgErr = "relationships can't be updated"
+
+convUpdateQuery
+ :: (P1C m)
+ => (PGColType -> Value -> m S.SQLExp)
+ -> UpdateQuery
+ -> m UpdateQueryP1
+convUpdateQuery f uq = do
+ let tableName = uqTable uq
+ tableInfo <- withPathK "table" $ askTabInfo tableName
+
+ -- Check if the role has update permissions
+ updPerm <- askUpdPermInfo tableInfo
+
+ -- Check if all dependent headers are present
+ validateHeaders $ upiRequiredHeaders updPerm
+
+ -- Check if select is allowed
+ selPerm <- modifyErr (<> selNecessaryMsg) $
+ askSelPermInfo tableInfo
+
+ let fieldInfoMap = tiFieldInfoMap tableInfo
+
+ -- convert the object to SQL set expression
+ setItems <- withPathK "$set" $
+ convOp fieldInfoMap updPerm (M.toList $ uqSet uq) $ convSet f
+
+ incItems <- withPathK "$inc" $
+ convOp fieldInfoMap updPerm (M.toList $ uqInc uq) $ convInc f
+
+ mulItems <- withPathK "$mul" $
+ convOp fieldInfoMap updPerm (M.toList $ uqMul uq) $ convMul f
+
+ defItems <- withPathK "$default" $
+    convOp fieldInfoMap updPerm (zip (uqDefault uq) (repeat ())) convDefault
+
+  -- convert the returning cols into a sql returning exp
+ mAnnRetCols <- forM mRetCols $ \retCols ->
+ withPathK "returning" $ fmap (zip retCols) $
+ checkRetCols fieldInfoMap selPerm retCols
+
+ let setExpItems = setItems ++ incItems ++ mulItems ++ defItems
+ updTable = upiTable updPerm
+
+ when (null setExpItems) $
+ throw400 UnexpectedPayload "atleast one of $set, $inc, $mul has to be present"
+
+ -- convert the where clause
+ annSQLBoolExp <- withPathK "where" $
+ convBoolExp' fieldInfoMap updTable selPerm (uqWhere uq) f
+
+ return $ UpdateQueryP1
+ tableName
+ setExpItems
+ (upiFilter updPerm, annSQLBoolExp)
+ (mkDefaultMutFlds mAnnRetCols)
+ where
+ mRetCols = uqReturning uq
+ selNecessaryMsg =
+ "; \"update\" is only allowed if the role "
+ <> "has \"select\" permission as \"where\" can't be used "
+ <> "without \"select\" permission on the table"
+
+convUpdQ :: UpdateQuery -> P1 (UpdateQueryP1, DS.Seq Q.PrepArg)
+convUpdQ updQ = flip runStateT DS.empty $ convUpdateQuery binRHSBuilder updQ
+
+updateP2 :: (UpdateQueryP1, DS.Seq Q.PrepArg) -> Q.TxE QErr RespBody
+updateP2 (u, p) =
+ runIdentity . Q.getRow
+ <$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder updateSQL) (toList p) True
+ where
+ updateSQL = toSQL $ mkSQLUpdate u
+
+instance HDBQuery UpdateQuery where
+
+ type Phase1Res UpdateQuery = (UpdateQueryP1, DS.Seq Q.PrepArg)
+ phaseOne = convUpdQ
+
+ phaseTwo _ = liftTx . updateP2
+
+ schemaCachePolicy = SCPNoChange
diff --git a/server/src-lib/Hasura/RQL/GBoolExp.hs b/server/src-lib/Hasura/RQL/GBoolExp.hs
new file mode 100644
index 00000000..d308cc65
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/GBoolExp.hs
@@ -0,0 +1,486 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeSynonymInstances #-}
+
+module Hasura.RQL.GBoolExp where
+
+import Hasura.Prelude
+import Hasura.RQL.Types
+import Hasura.SQL.Types
+import Hasura.SQL.Value
+
+import qualified Hasura.SQL.DML as S
+
+import Data.Aeson
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text.Extended as T
+
+data AnnValOpExpG a
+ = AEQ !a
+ | ANE !a
+
+ | AIN ![a]
+ | ANIN ![a]
+
+ | AGT !a
+ | ALT !a
+ | AGTE !a
+ | ALTE !a
+
+ | ALIKE !a -- LIKE
+ | ANLIKE !a -- NOT LIKE
+
+ | AILIKE !a -- ILIKE, case insensitive
+  | ANILIKE !a -- NOT ILIKE, case insensitive
+
+ | ASIMILAR !a -- similar, regex
+  | ANSIMILAR !a -- not similar, regex
+
+ deriving (Eq, Show)
+
+data OpExpG a
+ = OEVal !(AnnValOpExpG a)
+ | OECol !ColOp !PGCol
+ deriving (Show, Eq)
+
+type OpExpJ = OpExpG Value
+type OpExp = OpExpG (PGColType, PGColValue)
+
+data AnnValG a
+ = AVCol !PGColInfo !a
+ | AVRel !RelInfo !(GBoolExp (AnnValG a)) S.BoolExp
+ deriving (Show, Eq)
+
+type AnnValS = AnnValG [OpExpG S.SQLExp]
+type AnnValO a = AnnValG [OpExpG a]
+type AnnVal = AnnValO (PGColType, PGColValue)
+
+type AnnValJ = AnnValG [OpExpJ]
+
+type AnnSQLBoolExp = AnnValG S.BoolExp
+
+data ColOp
+ = CEQ
+ | CNE
+ | CGT
+ | CLT
+ | CGTE
+ | CLTE
+ deriving (Eq)
+
+instance Show ColOp where
+ show CEQ = "$ceq"
+ show CNE = "$cne"
+
+ show CGT = "$cgt"
+ show CLT = "$clt"
+ show CGTE = "$cgte"
+ show CLTE = "$clte"
+
+data RQLOp
+ = REQ -- equals
+ | RNE -- <>
+
+ | RIN -- in an array
+ | RNIN -- not in an array
+
+ | RGT -- >
+ | RLT -- <
+ | RGTE -- >=
+ | RLTE -- <=
+
+ | RLIKE -- LIKE
+ | RNLIKE -- NOT LIKE
+
+ | RILIKE -- ILIKE, case insensitive
+ | RNILIKE -- NOT ILIKE, case insensitive
+
+ | RSIMILAR -- similar, regex
+ | RNSIMILAR -- not similar, regex
+
+ deriving (Eq)
+
+instance Show RQLOp where
+ show REQ = "$eq"
+ show RNE = "$ne"
+
+ show RIN = "$in"
+ show RNIN = "$nin"
+
+ show RGT = "$gt"
+ show RLT = "$lt"
+ show RGTE = "$gte"
+ show RLTE = "$lte"
+
+ show RLIKE = "$like"
+ show RNLIKE = "$nlike"
+
+ show RILIKE = "$ilike"
+ show RNILIKE = "$nilike"
+
+ show RSIMILAR = "$similar"
+ show RNSIMILAR = "$nsimilar"
+
+instance DQuote RQLOp where
+ dquoteTxt op = T.pack $ show op
+
+parseOp :: (MonadError QErr m) => T.Text -> m (Either RQLOp ColOp)
+parseOp opStr = case opStr of
+ "$eq" -> return $ Left REQ
+ "_eq" -> return $ Left REQ
+ "$ne" -> return $ Left RNE
+ "_ne" -> return $ Left RNE
+ "$neq" -> return $ Left RNE
+ "_neq" -> return $ Left RNE
+
+ "$in" -> return $ Left RIN
+ "_in" -> return $ Left RIN
+ "$nin" -> return $ Left RNIN
+ "_nin" -> return $ Left RNIN
+
+ "$gt" -> return $ Left RGT
+ "_gt" -> return $ Left RGT
+ "$lt" -> return $ Left RLT
+ "_lt" -> return $ Left RLT
+ "$gte" -> return $ Left RGTE
+ "_gte" -> return $ Left RGTE
+ "$lte" -> return $ Left RLTE
+ "_lte" -> return $ Left RLTE
+
+ "$like" -> return $ Left RLIKE
+ "_like" -> return $ Left RLIKE
+ "$nlike" -> return $ Left RNLIKE
+ "_nlike" -> return $ Left RNLIKE
+
+ "$ilike" -> return $ Left RILIKE
+ "_ilike" -> return $ Left RILIKE
+ "$nilike" -> return $ Left RNILIKE
+ "_nilike" -> return $ Left RNILIKE
+
+ "$similar" -> return $ Left RSIMILAR
+ "_similar" -> return $ Left RSIMILAR
+ "$nsimilar" -> return $ Left RNSIMILAR
+ "_nsimilar" -> return $ Left RNSIMILAR
+
+ "$ceq" -> return $ Right CEQ
+ "_ceq" -> return $ Right CEQ
+ "$cne" -> return $ Right CNE
+ "_cne" -> return $ Right CNE
+ "$cneq" -> return $ Right CNE
+ "_cneq" -> return $ Right CNE
+
+ "$cgt" -> return $ Right CGT
+ "_cgt" -> return $ Right CGT
+ "$clt" -> return $ Right CLT
+ "_clt" -> return $ Right CLT
+ "$cgte" -> return $ Right CGTE
+ "_cgte" -> return $ Right CGTE
+ "$clte" -> return $ Right CLTE
+ "_clte" -> return $ Right CLTE
+
+ x -> throw400 UnexpectedPayload $ "Unknown operator : " <> x
+
+isRQLOp :: T.Text -> Bool
+isRQLOp t = case runIdentity . runExceptT $ parseOp t of
+ Left _ -> False
+ Right r -> either (const True) (const False) r
+
+type ValueParser m a = PGColType -> Value -> m a
+
+parseAnnOpExpG
+ :: (MonadError QErr m)
+ => (PGColType -> Value -> m a)
+ -> RQLOp -> PGColType -> Value -> m (AnnValOpExpG a)
+parseAnnOpExpG parser op ty val = case op of
+ REQ -> AEQ <$> parseOne -- equals
+ RNE -> ANE <$> parseOne -- <>
+ RIN -> AIN <$> parseMany -- in an array
+ RNIN -> ANIN <$> parseMany -- not in an array
+ RGT -> AGT <$> parseOne -- >
+ RLT -> ALT <$> parseOne -- <
+ RGTE -> AGTE <$> parseOne -- >=
+ RLTE -> ALTE <$> parseOne -- <=
+ RLIKE -> ALIKE <$> parseOne -- LIKE
+ RNLIKE -> ANLIKE <$> parseOne -- NOT LIKE
+ RILIKE -> AILIKE <$> parseOne -- ILIKE, case insensitive
+ RNILIKE -> ANILIKE <$> parseOne -- NOT ILIKE, case insensitive
+ RSIMILAR -> ASIMILAR <$> parseOne -- similar, regex
+ RNSIMILAR -> ANSIMILAR <$> parseOne -- not similar, regex
+ where
+ parseOne = parser ty val
+ -- runAesonParser (parsePGValue ty) val
+ parseMany = do
+ vals <- runAesonParser parseJSON val
+ indexedForM vals (parser ty)
+
+parseOpExps
+ :: (MonadError QErr m)
+ => ValueParser m a
+ -> FieldInfoMap
+ -> PGColInfo
+ -> Value
+ -> m [OpExpG a]
+parseOpExps valParser cim (PGColInfo cn colTy) (Object o) =
+ forM (M.toList o) $ \(k, v) -> do
+ op <- parseOp k
+ case (op, v) of
+ (Left rqlOp, _) -> do
+ modifyErr (cn <<>) $ getOpTypeChecker rqlOp colTy
+ annValOp <- withPathK (T.pack $ show rqlOp) $
+ parseAnnOpExpG valParser rqlOp colTy v
+ return $ OEVal annValOp
+ (Right colOp, String c) -> do
+ let pgCol = PGCol c
+ errMsg = "column operators can only compare postgres columns"
+ rhsType <- askPGType cim pgCol errMsg
+ when (colTy /= rhsType) $
+ throw400 UnexpectedPayload $
+ "incompatible column types : " <> cn <<> ", " <>> pgCol
+ return $ OECol colOp pgCol
+ (Right _, _) -> throw400 UnexpectedPayload "expecting a string for column operator"
+parseOpExps valParser _ (PGColInfo _ colTy) val = do
+ annValOp <- parseAnnOpExpG valParser REQ colTy val
+ return [OEVal annValOp]
+
+buildMsg :: PGColType -> [PGColType] -> QErr
+buildMsg ty expTys =
+ err400 UnexpectedPayload $ mconcat
+ [ " is of type " <> T.pack (show ty)
+ , "; this operator works "
+ , "only on columns of type "
+ , T.intercalate "/" $ map (T.dquote . T.pack . show) expTys
+ ]
+
+type OpTypeChecker m = PGColType -> m ()
+
+textOnlyOp :: (MonadError QErr m) => OpTypeChecker m
+textOnlyOp PGText = return ()
+textOnlyOp PGVarchar = return ()
+textOnlyOp ty =
+ throwError $ buildMsg ty [PGVarchar, PGText]
+
+validOnAllTypes :: (MonadError QErr m) => OpTypeChecker m
+validOnAllTypes _ = return ()
+
+getOpTypeChecker :: (MonadError QErr m) => RQLOp -> OpTypeChecker m
+getOpTypeChecker REQ = validOnAllTypes
+getOpTypeChecker RNE = validOnAllTypes
+getOpTypeChecker RIN = validOnAllTypes
+getOpTypeChecker RNIN = validOnAllTypes
+getOpTypeChecker RGT = validOnAllTypes
+getOpTypeChecker RLT = validOnAllTypes
+getOpTypeChecker RGTE = validOnAllTypes
+getOpTypeChecker RLTE = validOnAllTypes
+getOpTypeChecker RLIKE = textOnlyOp
+getOpTypeChecker RNLIKE = textOnlyOp
+getOpTypeChecker RILIKE = textOnlyOp
+getOpTypeChecker RNILIKE = textOnlyOp
+getOpTypeChecker RSIMILAR = textOnlyOp
+getOpTypeChecker RNSIMILAR = textOnlyOp
+
+-- This convoluted expression instead of col = val
+-- to handle the case of col : null
+equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
+equalsBoolExpBuilder qualColExp rhsExp =
+ S.BEBin S.OrOp (S.BECompare S.SEQ qualColExp rhsExp)
+ (S.BEBin S.AndOp
+ (S.BENull qualColExp)
+ (S.BENull rhsExp))
+
+notEqualsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
+notEqualsBoolExpBuilder qualColExp rhsExp =
+ S.BEBin S.OrOp (S.BECompare S.SNE qualColExp rhsExp)
+ (S.BEBin S.AndOp
+ (S.BENotNull qualColExp)
+ (S.BENull rhsExp))
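+-- Informally, the expressions built above have the shape :
+--   equals     : (lhs = rhs)  OR (lhs IS NULL AND rhs IS NULL)
+--   not equals : (lhs <> rhs) OR (lhs IS NOT NULL AND rhs IS NULL)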
+
+mapBoolExp :: (Monad m)
+ => (a -> m b)
+ -> GBoolExp a -> m (GBoolExp b)
+mapBoolExp f (BoolAnd bes) = BoolAnd <$> mapM (mapBoolExp f) bes
+mapBoolExp f (BoolOr bes) = BoolOr <$> mapM (mapBoolExp f) bes
+mapBoolExp f (BoolCol ce) = BoolCol <$> f ce
+mapBoolExp f (BoolNot notExp) = BoolNot <$> mapBoolExp f notExp
+
+annBoolExp
+ :: (QErrM m, CacheRM m)
+ => ValueParser m a
+ -> FieldInfoMap
+ -> GBoolExp ColExp
+ -> m (GBoolExp (AnnValG [OpExpG a]))
+annBoolExp valParser cim = \case
+ (BoolAnd bes) -> BoolAnd <$> mapM (annBoolExp valParser cim) bes
+ (BoolOr bes) -> BoolOr <$> mapM (annBoolExp valParser cim) bes
+ (BoolCol ce) -> BoolCol <$> annColExp valParser cim ce
+ (BoolNot notExp) -> BoolNot <$> annBoolExp valParser cim notExp
+
+annColExp
+ :: (QErrM m, CacheRM m)
+ => ValueParser m a
+ -> FieldInfoMap
+ -> ColExp
+ -> m (AnnValG [OpExpG a])
+annColExp valueParser colInfoMap (ColExp fieldName colVal) = do
+ colInfo <- askFieldInfo colInfoMap fieldName
+ case colInfo of
+ FIColumn (PGColInfo _ PGJSON) ->
+ throwError (err400 UnexpectedPayload "JSON column can not be part of where clause")
+ FIColumn (PGColInfo _ PGJSONB) ->
+ throwError (err400 UnexpectedPayload "JSONB column can not be part of where clause")
+ FIColumn pgi ->
+ AVCol pgi <$> parseOpExps valueParser colInfoMap pgi colVal
+ FIRelationship relInfo -> do
+ relBoolExp <- decodeValue colVal
+ relFieldInfoMap <- askFieldInfoMap $ riRTable relInfo
+ annRelBoolExp <- annBoolExp valueParser relFieldInfoMap relBoolExp
+ return $ AVRel relInfo annRelBoolExp $ S.BELit True
+
+type BoolExpBuilder m a = S.SQLExp -> AnnValOpExpG a -> m S.BoolExp
+
+convBoolRhs
+ :: (Monad m)
+ => BoolExpBuilder m a -> S.Qual
+ -> GBoolExp (AnnValO a) -> m (GBoolExp AnnSQLBoolExp)
+convBoolRhs vp tq =
+ traverse (convColRhs vp tq )
+
+convColRhs
+ :: (Monad m)
+ => BoolExpBuilder m a
+ -> S.Qual -> AnnValO a -> m (AnnValG S.BoolExp)
+convColRhs bExpBuilder tableQual annVal = case annVal of
+ AVCol pci@(PGColInfo cn _) opExps -> do
+ let qualColExp = S.SEQIden $ S.QIden tableQual (toIden cn)
+ bExps <- forM opExps $ \case
+ OEVal annOpValExp -> bExpBuilder qualColExp annOpValExp
+ OECol op rCol -> do
+ let rhsColExp = S.SEQIden $ S.QIden tableQual (toIden rCol)
+ return $ mkColOpSQLExp op qualColExp rhsColExp
+ -- And them all
+ return $ AVCol pci $ foldr (S.BEBin S.AndOp) (S.BELit True) bExps
+
+ AVRel ri@(RelInfo _ _ colMapping relTN _) nesAnn fltr -> do
+ -- Convert the where clause on the relationship
+ annRelBoolExp <- convBoolRhs bExpBuilder (S.mkQual relTN) nesAnn
+ let backCompExp = foldr (S.BEBin S.AndOp) (S.BELit True) $
+ flip map colMapping $ \(lCol, rCol) ->
+ S.BECompare S.SEQ (S.mkSIdenExp rCol)
+ (S.SEQIden $ S.QIden tableQual (toIden lCol))
+ return $ AVRel ri annRelBoolExp $ S.BEBin S.AndOp fltr backCompExp
+
+cBoolExp
+ :: GBoolExp AnnSQLBoolExp
+ -> S.BoolExp
+cBoolExp be =
+ runIdentity $ flip foldBoolExp be $ \ace ->
+ return $ cColExp ace
+
+cColExp
+ :: AnnSQLBoolExp
+ -> S.BoolExp
+cColExp annVal = case annVal of
+ AVCol _ be -> be
+ AVRel (RelInfo _ _ _ relTN _) nesAnn backCompExp -> do
+ -- Convert the where clause on the relationship
+ let annRelBoolExp = cBoolExp nesAnn
+ innerBoolExp = S.BEBin S.AndOp backCompExp annRelBoolExp
+ S.mkExists relTN innerBoolExp
+
+inBoolExpBuilder :: S.SQLExp -> [S.SQLExp] -> S.BoolExp
+inBoolExpBuilder qualColExp rhsExps =
+ foldr (S.BEBin S.OrOp) (S.BELit False) eqExps
+ where
+ eqExps = map (equalsBoolExpBuilder qualColExp) rhsExps
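+-- e.g. for rhs values [a, b] this builds
+--   (lhs ~eq~ a) OR ((lhs ~eq~ b) OR FALSE)
+-- where ~eq~ is the null-aware equality built above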
+
+-- txtValParser
+-- :: (MonadError QErr m)
+-- => ValueParser m (AnnValOpExpG S.SQLExp)
+-- txtValParser =
+-- undefined
+
+pgValParser
+ :: (MonadError QErr m)
+ => PGColType -> Value -> m PGColValue
+pgValParser ty =
+ runAesonParser (parsePGValue ty)
+
+txtRHSBuilder
+ :: (MonadError QErr m)
+ => PGColType -> Value -> m S.SQLExp
+txtRHSBuilder ty val =
+ txtEncoder <$> pgValParser ty val
+
+-- this does not parse the value
+noValParser
+ :: (MonadError QErr m)
+ => ValueParser m Value
+noValParser _ = return
+
+-- binExpBuilder
+-- :: (Monad m)
+-- => BoolExpBuilder m PGColValue
+-- binExpBuilder =
+-- mkBoolExpBuilder
+
+mkBoolExpBuilder
+ :: (Monad m)
+ => (a -> m S.SQLExp)
+ -> BoolExpBuilder m a
+mkBoolExpBuilder rhsBldr lhs = \case
+ AEQ val -> mkSimpleBoolExpBuilder equalsBoolExpBuilder val
+ ANE val -> mkSimpleBoolExpBuilder notEqualsBoolExpBuilder val
+ AIN vals -> mkInOrNotBoolExpBuilder True vals
+ ANIN vals -> mkInOrNotBoolExpBuilder False vals
+ AGT val -> mkSimpleBoolExpBuilder (S.BECompare S.SGT) val
+ ALT val -> mkSimpleBoolExpBuilder (S.BECompare S.SLT) val
+ AGTE val -> mkSimpleBoolExpBuilder (S.BECompare S.SGTE) val
+ ALTE val -> mkSimpleBoolExpBuilder (S.BECompare S.SLTE) val
+ ALIKE val -> mkSimpleBoolExpBuilder (S.BECompare S.SLIKE) val
+ ANLIKE val -> mkSimpleBoolExpBuilder (S.BECompare S.SNLIKE) val
+ AILIKE val -> mkSimpleBoolExpBuilder (S.BECompare S.SILIKE) val
+ ANILIKE val -> mkSimpleBoolExpBuilder (S.BECompare S.SNILIKE) val
+ ASIMILAR val -> mkSimpleBoolExpBuilder (S.BECompare S.SSIMILAR) val
+ ANSIMILAR val -> mkSimpleBoolExpBuilder (S.BECompare S.SNSIMILAR) val
+ where
+ mkSimpleBoolExpBuilder beF pgColVal =
+ beF lhs <$> rhsBldr pgColVal
+
+ mkInOrNotBoolExpBuilder isIn arrVals = do
+ rhsExps <- mapM rhsBldr arrVals
+ let boolExp = inBoolExpBuilder lhs rhsExps
+ return $ bool (S.BENot boolExp) boolExp isIn
+
+-- txtRHSBuilder :: (MonadError QErr m) => RHSBuilder m
+-- txtRHSBuilder colType = runAesonParser (convToTxt colType)
+
+mkColOpSQLExp :: ColOp -> S.SQLExp -> S.SQLExp -> S.BoolExp
+mkColOpSQLExp colOp =
+ case colOp of
+ CEQ -> S.BECompare S.SEQ
+ CNE -> S.BECompare S.SNE
+ CGT -> S.BECompare S.SGT
+ CLT -> S.BECompare S.SLT
+ CGTE -> S.BECompare S.SGTE
+ CLTE -> S.BECompare S.SLTE
+
+getColExpDeps :: QualifiedTable -> AnnValG a -> [SchemaDependency]
+getColExpDeps tn (AVCol pgCI _) =
+ [SchemaDependency (SOTableObj tn (TOCol $ pgiName pgCI)) "on_type"]
+getColExpDeps tn (AVRel relInfo nesAnn _) =
+ pd : getBoolExpDeps (riRTable relInfo) nesAnn
+ where
+ pd = SchemaDependency (SOTableObj tn (TORel $ riName relInfo)) "on_type"
+
+getBoolExpDeps :: QualifiedTable -> GBoolExp (AnnValG a) -> [SchemaDependency]
+getBoolExpDeps tn (BoolAnd exps) =
+ mconcat $ map (getBoolExpDeps tn) exps
+getBoolExpDeps tn (BoolOr exps) =
+ mconcat $ map (getBoolExpDeps tn) exps
+getBoolExpDeps tn (BoolCol colExp) =
+ getColExpDeps tn colExp
+getBoolExpDeps tn (BoolNot notExp) =
+ getBoolExpDeps tn notExp
diff --git a/server/src-lib/Hasura/RQL/Instances.hs b/server/src-lib/Hasura/RQL/Instances.hs
new file mode 100644
index 00000000..906f5ee7
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Instances.hs
@@ -0,0 +1,14 @@
+{-# LANGUAGE TemplateHaskell #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+module Hasura.RQL.Instances where
+
+import Hasura.Prelude
+
+import Instances.TH.Lift ()
+import qualified Language.Haskell.TH.Syntax as TH
+
+import qualified Data.HashMap.Strict as M
+
+instance (TH.Lift k, TH.Lift v) => TH.Lift (M.HashMap k v) where
+ lift m = [| M.fromList $(TH.lift $ M.toList m) |]
diff --git a/server/src-lib/Hasura/RQL/Types.hs b/server/src-lib/Hasura/RQL/Types.hs
new file mode 100644
index 00000000..4c3245e0
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types.hs
@@ -0,0 +1,283 @@
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE MultiParamTypeClasses #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeFamilies #-}
+
+module Hasura.RQL.Types
+ ( HasSchemaCache(..)
+ , ProvidesFieldInfoMap(..)
+ , HDBQuery(..)
+ , SchemaCachePolicy(..)
+ , queryModifiesSchema
+
+ , P1
+ , P1C
+ , MonadTx(..)
+ , UserInfoM(..)
+ , RespBody
+ , P2C
+ -- , P2Res
+ , liftP1
+ , runP1
+ , successMsg
+
+ , QCtx(..)
+ , HasQCtx(..)
+ , mkAdminQCtx
+ , askTabInfo
+ , askFieldInfoMap
+ , askPGType
+ , assertPGCol
+ , askRelType
+ , askFieldInfo
+ , askPGColInfo
+ , askCurRole
+
+ , askQTemplateInfo
+
+ , adminOnly
+ , defaultTxErrorHandler
+
+ , HeaderObj
+
+ , module R
+ ) where
+
+import Hasura.RQL.Types.Common as R
+import Hasura.RQL.Types.DML as R
+import Hasura.RQL.Types.Error as R
+import Hasura.RQL.Types.Permission as R
+import Hasura.RQL.Types.SchemaCache as R
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+
+import Data.Aeson
+
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+
+class ProvidesFieldInfoMap r where
+ getFieldInfoMap :: QualifiedTable -> r -> Maybe FieldInfoMap
+
+class HasSchemaCache a where
+ getSchemaCache :: a -> SchemaCache
+
+instance HasSchemaCache QCtx where
+ getSchemaCache = qcSchemaCache
+
+instance HasSchemaCache SchemaCache where
+ getSchemaCache = id
+
+instance ProvidesFieldInfoMap SchemaCache where
+ getFieldInfoMap tn =
+ fmap tiFieldInfoMap . M.lookup tn . scTables
+
+-- There are two phases to every query.
+-- Phase 1 : Use the cached env to validate or invalidate
+-- Phase 2 : Hit Postgres if needed
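+--
+-- A minimal sketch of how the two phases might be chained together
+-- (hypothetical driver; assumes the P2 stack below can rethrow QErr) :
+--
+--   runHDBQuery :: (HDBQuery q) => QCtx -> q -> P2 RespBody
+--   runHDBQuery qctx q = do
+--     p1Res <- liftP1 qctx $ phaseOne q
+--     phaseTwo q p1Res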
+
+class HDBQuery q where
+ type Phase1Res q -- Phase 1 result
+
+ -- Use QCtx
+ phaseOne :: q -> P1 (Phase1Res q)
+
+ -- Hit Postgres
+ phaseTwo :: q -> Phase1Res q -> P2 BL.ByteString
+
+ schemaCachePolicy :: SchemaCachePolicy q
+
+data SchemaCachePolicy a
+ = SCPReload
+ | SCPNoChange
+ deriving (Show, Eq)
+
+schemaCachePolicyToBool :: SchemaCachePolicy a -> Bool
+schemaCachePolicyToBool SCPReload = True
+schemaCachePolicyToBool SCPNoChange = False
+
+getSchemaCachePolicy :: (HDBQuery a) => a -> SchemaCachePolicy a
+getSchemaCachePolicy _ = schemaCachePolicy
+
+type RespBody = BL.ByteString
+
+queryModifiesSchema :: (HDBQuery q) => q -> Bool
+queryModifiesSchema =
+ schemaCachePolicyToBool . getSchemaCachePolicy
+
+data QCtx
+ = QCtx
+ { qcUserInfo :: !UserInfo
+ , qcSchemaCache :: !SchemaCache
+ } deriving (Show, Eq)
+
+class HasQCtx a where
+ getQCtx :: a -> QCtx
+
+instance HasQCtx QCtx where
+ getQCtx = id
+
+mkAdminQCtx :: SchemaCache -> QCtx
+mkAdminQCtx = QCtx adminUserInfo
+
+type P2 = StateT SchemaCache (ReaderT UserInfo (Q.TxE QErr))
+
+class (Monad m) => UserInfoM m where
+ askUserInfo :: m UserInfo
+
+type P1C m = (UserInfoM m, QErrM m, CacheRM m)
+
+askTabInfo
+ :: (QErrM m, CacheRM m)
+ => QualifiedTable -> m TableInfo
+askTabInfo tabName = do
+ rawSchemaCache <- askSchemaCache
+ liftMaybe (err400 NotExists errMsg) $ M.lookup tabName $ scTables rawSchemaCache
+ where
+ errMsg = "table " <> tabName <<> " does not exist"
+
+askQTemplateInfo
+ :: (P1C m)
+ => TQueryName
+ -> m QueryTemplateInfo
+askQTemplateInfo qtn = do
+ rawSchemaCache <- askSchemaCache
+ liftMaybe (err400 NotExists errMsg) $ M.lookup qtn $ scQTemplates rawSchemaCache
+ where
+ errMsg = "query-template " <> qtn <<> " does not exist"
+
+instance UserInfoM P1 where
+ askUserInfo = qcUserInfo <$> ask
+
+instance CacheRM P1 where
+ askSchemaCache = qcSchemaCache <$> ask
+
+instance UserInfoM P2 where
+ askUserInfo = ask
+
+type P2C m = (QErrM m, CacheRWM m, MonadTx m)
+
+class (Monad m) => MonadTx m where
+ liftTx :: Q.TxE QErr a -> m a
+
+instance (MonadTx m) => MonadTx (StateT s m) where
+ liftTx = lift . liftTx
+
+instance (MonadTx m) => MonadTx (ReaderT s m) where
+ liftTx = lift . liftTx
+
+instance MonadTx (Q.TxE QErr) where
+ liftTx = id
+
+type P1 = ExceptT QErr (Reader QCtx)
+
+runP1 :: QCtx -> P1 a -> Either QErr a
+runP1 qEnv m = runReader (runExceptT m) qEnv
+
+liftMaybe :: (QErrM m) => QErr -> Maybe a -> m a
+liftMaybe e = maybe (throwError e) return
+
+liftP1 :: (MonadError QErr m) => QCtx -> P1 a -> m a
+liftP1 r m = liftEither $ runP1 r m
+
+askFieldInfoMap
+ :: (QErrM m, CacheRM m)
+ => QualifiedTable -> m FieldInfoMap
+askFieldInfoMap tabName = do
+ mFieldInfoMap <- getFieldInfoMap tabName <$> askSchemaCache
+ maybe (throw400 NotExists errMsg) return mFieldInfoMap
+ where
+ errMsg = "table " <> tabName <<> " does not exist"
+
+askPGType :: (MonadError QErr m)
+ => FieldInfoMap
+ -> PGCol
+ -> T.Text
+ -> m PGColType
+askPGType m c msg = do
+ colInfo <- modifyErr ("column " <>) $
+ askFieldInfo m (fromPGCol c)
+ case colInfo of
+ (FIColumn pgColInfo) ->
+ return $ pgiType pgColInfo
+ _ ->
+ throwError $ err400 UnexpectedPayload $ mconcat
+ [ "expecting a postgres column; but, "
+ , c <<> " is a relationship; "
+ , msg
+ ]
+
+assertPGCol :: (MonadError QErr m)
+ => FieldInfoMap
+ -> T.Text
+ -> PGCol
+ -> m ()
+assertPGCol m msg c = do
+ _ <- askPGType m c msg
+ return ()
+
+askRelType :: (MonadError QErr m)
+ => FieldInfoMap
+ -> RelName
+ -> T.Text
+ -> m RelInfo
+askRelType m r msg = do
+ colInfo <- modifyErr ("relationship " <>) $
+ askFieldInfo m (fromRel r)
+ case colInfo of
+ (FIRelationship relInfo) -> return relInfo
+ _ ->
+ throwError $ err400 UnexpectedPayload $ mconcat
+ [ "expecting a relationship; but, "
+ , r <<> " is a postgres column; "
+ , msg
+ ]
+
+askFieldInfo :: (MonadError QErr m)
+ => FieldInfoMap
+ -> FieldName
+ -> m FieldInfo
+askFieldInfo m f =
+ case M.lookup f m of
+ Just colInfo -> return colInfo
+ Nothing ->
+ throw400 NotExists $ mconcat
+ [ f <<> " does not exist"
+ ]
+
+askPGColInfo :: (MonadError QErr m)
+ => M.HashMap PGCol PGColInfo
+ -> PGCol
+ -> m PGColInfo
+askPGColInfo m c =
+ case M.lookup c m of
+ Just colInfo -> return colInfo
+ Nothing ->
+ throw400 NotExists $ mconcat
+ [ c <<> " does not exist"
+ ]
+
+askCurRole :: (UserInfoM m) => m RoleName
+askCurRole = userRole <$> askUserInfo
+
+adminOnly :: (UserInfoM m, QErrM m) => m ()
+adminOnly = do
+ curRole <- askCurRole
+ unless (curRole == adminRole) $ throw400 AccessDenied errMsg
+ where
+ errMsg = "restricted access : admin only"
+
+defaultTxErrorHandler :: Q.PGTxErr -> QErr
+defaultTxErrorHandler txe =
+ let e = err500 PostgresError "postgres query error"
+ in e {qeInternal = Just $ toJSON txe}
+
+successMsg :: BL.ByteString
+successMsg = "{\"message\":\"success\"}"
+
+type HeaderObj = M.HashMap T.Text T.Text
diff --git a/server/src-lib/Hasura/RQL/Types/Common.hs b/server/src-lib/Hasura/RQL/Types/Common.hs
new file mode 100644
index 00000000..c8f9e421
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/Common.hs
@@ -0,0 +1,188 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE DeriveTraversable #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.RQL.Types.Common
+ ( RelName(..)
+ , RelType(..)
+ , relTypeToTxt
+
+ , FieldName(..)
+ , fromPGCol
+ , fromRel
+
+ , ColExp(..)
+ , GBoolExp(..)
+ , BoolExp
+ , foldBoolExp
+
+ , TQueryName(..)
+ , TemplateParam(..)
+
+ , ToAesonPairs(..)
+ , WithTable(..)
+ ) where
+
+import Hasura.Prelude
+import qualified Hasura.SQL.DML as S
+import Hasura.SQL.Types
+
+import Data.Aeson
+import Data.Aeson.Internal
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+import qualified Database.PG.Query as Q
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+import qualified PostgreSQL.Binary.Decoding as PD
+
+newtype RelName
+ = RelName {getRelTxt :: T.Text}
+ deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift)
+
+instance IsIden RelName where
+ toIden (RelName r) = Iden r
+
+instance DQuote RelName where
+ dquoteTxt (RelName r) = r
+
+relTypeToTxt :: RelType -> T.Text
+relTypeToTxt ObjRel = "object"
+relTypeToTxt ArrRel = "array"
+
+data RelType
+ = ObjRel
+ | ArrRel
+ deriving (Show, Eq)
+
+instance ToJSON RelType where
+ toJSON = String . relTypeToTxt
+
+instance FromJSON RelType where
+ parseJSON (String "object") = return ObjRel
+ parseJSON (String "array") = return ArrRel
+ parseJSON _ = fail "expecting either 'object' or 'array' for rel_type"
+
+instance Q.FromCol RelType where
+ fromCol bs = flip Q.fromColHelper bs $ PD.enum $ \case
+ "object" -> Just ObjRel
+ "array" -> Just ArrRel
+ _ -> Nothing
+
+newtype FieldName
+ = FieldName { getFieldNameTxt :: T.Text }
+ deriving (Show, Eq, Hashable, FromJSON, ToJSON, FromJSONKey, ToJSONKey, Lift)
+
+instance IsIden FieldName where
+ toIden (FieldName f) = Iden f
+
+instance DQuote FieldName where
+ dquoteTxt (FieldName c) = c
+
+fromPGCol :: PGCol -> FieldName
+fromPGCol (PGCol c) = FieldName c
+
+fromRel :: RelName -> FieldName
+fromRel (RelName r) = FieldName r
+
+type BoolExp = GBoolExp ColExp
+
+data ColExp
+ = ColExp
+ { ceCol :: !FieldName
+ , ceVal :: !Value
+ } deriving (Show, Eq, Lift)
+
+data GBoolExp a
+ = BoolAnd ![GBoolExp a]
+ | BoolOr ![GBoolExp a]
+ | BoolCol !a
+ | BoolNot !(GBoolExp a)
+ deriving (Show, Eq, Lift, Functor, Foldable, Traversable)
+
+instance ToJSON (GBoolExp ColExp) where
+ toJSON (BoolAnd bExps) =
+ object $ flip map bExps $ \bExp -> case bExp of
+ BoolOr cbExps -> "$or" .= cbExps
+ BoolAnd cbExps -> "$and" .= cbExps
+ BoolCol (ColExp k v) -> getFieldNameTxt k .= v
+ BoolNot notExp -> "$not" .= notExp
+ toJSON (BoolOr bExps) =
+ object $ flip map bExps $ \bExp -> case bExp of
+ BoolOr cbExps -> "$or" .= cbExps
+ BoolAnd cbExps -> "$and" .= cbExps
+ BoolCol (ColExp k v) -> getFieldNameTxt k .= v
+ BoolNot notExp -> "$not" .= notExp
+ toJSON (BoolCol (ColExp k v)) =
+ object [ getFieldNameTxt k .= v ]
+ toJSON (BoolNot notExp) =
+ object [ "$not" .= notExp ]
+
+instance FromJSON (GBoolExp ColExp) where
+ parseJSON (Object o) = do
+ boolExps <- forM (M.toList o) $ \(k, v) -> if
+ | k == "$or" -> BoolOr <$> parseJSON v > Key k
+ | k == "_or" -> BoolOr <$> parseJSON v > Key k
+ | k == "$and" -> BoolAnd <$> parseJSON v > Key k
+ | k == "_and" -> BoolAnd <$> parseJSON v > Key k
+ | k == "$not" -> BoolNot <$> parseJSON v > Key k
+ | k == "_not" -> BoolNot <$> parseJSON v > Key k
+ | otherwise -> fmap (BoolCol . ColExp (FieldName k)) $ parseJSON v
+ return $ BoolAnd boolExps
+ parseJSON _ = fail "expecting an Object for boolean exp"
+
+foldBoolExp :: (Monad m)
+ => (a -> m S.BoolExp)
+ -> GBoolExp a
+ -> m S.BoolExp
+foldBoolExp f (BoolAnd bes) = do
+ sqlBExps <- mapM (foldBoolExp f) bes
+ return $ foldr (S.BEBin S.AndOp) (S.BELit True) sqlBExps
+foldBoolExp f (BoolOr bes) = do
+ sqlBExps <- mapM (foldBoolExp f) bes
+ return $ foldr (S.BEBin S.OrOp) (S.BELit False) sqlBExps
+foldBoolExp f (BoolNot notExp) =
+ S.BENot <$> foldBoolExp f notExp
+foldBoolExp f (BoolCol ce) =
+ f ce
+
+newtype TQueryName
+ = TQueryName { getTQueryName :: T.Text }
+ deriving ( Show, Eq, Hashable, FromJSONKey, ToJSONKey
+ , FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift)
+
+instance IsIden TQueryName where
+ toIden (TQueryName r) = Iden r
+
+instance DQuote TQueryName where
+ dquoteTxt (TQueryName r) = r
+
+newtype TemplateParam
+ = TemplateParam { getTemplateParam :: T.Text }
+ deriving (Show, Eq, Hashable, FromJSON, FromJSONKey, ToJSONKey, ToJSON, Lift)
+
+instance DQuote TemplateParam where
+ dquoteTxt (TemplateParam r) = r
+
+class ToAesonPairs a where
+ toAesonPairs :: (KeyValue v) => a -> [v]
+
+data WithTable a
+ = WithTable
+ { wtName :: !QualifiedTable
+ , wtInfo :: !a
+ } deriving (Show, Eq, Lift)
+
+instance (FromJSON a) => FromJSON (WithTable a) where
+ parseJSON v@(Object o) =
+ WithTable <$> o .: "table" <*> parseJSON v
+ parseJSON _ =
+ fail "expecting an Object with key 'table'"
+
+instance (ToAesonPairs a) => ToJSON (WithTable a) where
+ toJSON (WithTable tn rel) =
+ object $ ("table" .= tn):toAesonPairs rel
diff --git a/server/src-lib/Hasura/RQL/Types/DML.hs b/server/src-lib/Hasura/RQL/Types/DML.hs
new file mode 100644
index 00000000..b7c7b8fc
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/DML.hs
@@ -0,0 +1,331 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+{-# OPTIONS_GHC -fno-warn-orphans #-}
+
+module Hasura.RQL.Types.DML
+ ( DMLQuery(..)
+
+ , OrderByExp(..)
+ , OrderByItem(..)
+ , OrderByCol(..)
+
+ , SelectG(..)
+ , selectGToPairs
+
+ , Wildcard(..)
+ , SelCol(..)
+ , SelectQ
+ , SelectQuery
+
+ , InsObj
+ , InsertQuery(..)
+ , OnConflict(..)
+ , ConflictAction(..)
+ , ConstraintOn(..)
+
+ , UpdVals
+ , UpdateQuery(..)
+
+ , DeleteQuery(..)
+
+ , CountQuery(..)
+
+ , QueryT(..)
+
+ ) where
+
+import qualified Hasura.SQL.DML as S
+
+import Hasura.Prelude
+import Hasura.RQL.Types.Common
+import Hasura.SQL.Types
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import qualified Data.Attoparsec.Text as Atto
+import qualified Data.Attoparsec.Text as AT
+import qualified Data.Attoparsec.Types as AttoT
+import qualified Data.HashMap.Strict as M
+import qualified Data.Text as T
+import Hasura.RQL.Instances ()
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+data DMLQuery a
+ = DMLQuery !QualifiedTable a
+ deriving (Show, Eq, Lift)
+
+instance (FromJSON a) => FromJSON (DMLQuery a) where
+ parseJSON o@(Object v) =
+ DMLQuery
+ <$> v .: "table"
+ <*> parseJSON o
+ parseJSON _ =
+ fail "Expected an object for query"
+
+$(deriveJSON defaultOptions{constructorTagModifier = snakeCase . drop 2} ''S.OrderType)
+
+$(deriveJSON defaultOptions{constructorTagModifier = snakeCase . drop 1} ''S.NullsOrder)
+
+newtype OrderByCol
+ = OrderByCol { getOrderByColPath :: [T.Text] }
+ deriving (Show, Eq, Lift)
+
+instance ToJSON OrderByCol where
+ toJSON (OrderByCol paths) =
+ String $ T.intercalate "." paths
+
+orderByColFromTxt :: T.Text -> OrderByCol
+orderByColFromTxt =
+ OrderByCol . T.split (=='.')
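+-- e.g. orderByColFromTxt "author.name" gives OrderByCol ["author", "name"]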
+
+instance FromJSON OrderByCol where
+ parseJSON (String t) =
+ return $ orderByColFromTxt t
+ parseJSON v =
+ OrderByCol <$> parseJSON v
+
+data OrderByItem
+ = OrderByItem
+ { obiType :: !(Maybe S.OrderType)
+ , obiColumn :: !OrderByCol
+ , obiNulls :: !(Maybe S.NullsOrder)
+ } deriving (Show, Eq, Lift)
+
+$(deriveToJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''OrderByItem)
+
+-- Can either be string / object
+instance FromJSON OrderByItem where
+ parseJSON (String t) =
+ case Atto.parseOnly orderByParser t of
+ Right r -> return r
+ Left _ ->
+ fail "string format for 'order_by' entry : {+/-}column Eg : +posted"
+
+ parseJSON (Object o) =
+ OrderByItem
+ <$> o .:? "type"
+ <*> o .: "column"
+ <*> o .:? "nulls"
+ parseJSON _ = fail "expecting an object or string for order by"
+
+newtype OrderByExp
+ = OrderByExp { getOrderByItems :: [OrderByItem] }
+ deriving (Show, Eq, ToJSON, Lift)
+
+instance FromJSON OrderByExp where
+ parseJSON v@(String _) =
+ OrderByExp . (:[]) <$> parseJSON v
+ parseJSON v@(Array _) =
+ OrderByExp <$> parseJSON v
+ parseJSON v@(Object _) =
+ OrderByExp . (:[]) <$> parseJSON v
+ parseJSON _ =
+ fail "Expecting : array/string/object"
+
+orderByParser :: AttoT.Parser T.Text OrderByItem
+orderByParser =
+ OrderByItem <$> otP <*> colP <*> pure Nothing
+ where
+ otP = ("+" *> pure (Just S.OTAsc))
+ <|> ("-" *> pure (Just S.OTDesc))
+ <|> pure Nothing
+ colP = orderByColFromTxt <$> AT.takeText
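+
+-- Illustrative: AT.parseOnly orderByParser "+posted"
+-- == Right (OrderByItem (Just S.OTAsc) (OrderByCol ["posted"]) Nothing)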
+
+data SelectG a b
+ = SelectG
+ { sqColumns :: ![a] -- Postgres columns and relationships
+ , sqWhere :: !(Maybe b) -- Filter
+ , sqOrderBy :: !(Maybe OrderByExp) -- Ordering
+ , sqLimit :: !(Maybe Value) -- Limit
+ , sqOffset :: !(Maybe Value) -- Offset
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelectG)
+
+selectGToPairs :: (KeyValue kv, ToJSON a, ToJSON b)
+ => SelectG a b -> [kv]
+selectGToPairs (SelectG selCols mWh mOb mLt mOf) =
+ [ "columns" .= selCols
+ , "where" .= mWh
+ , "order_by" .= mOb
+ , "limit" .= mLt
+ , "offset" .= mOf
+ ]
+
+data Wildcard
+ = Star
+ | StarDot !Wildcard
+ deriving (Show, Eq, Ord, Lift)
+
+wcToText :: Wildcard -> T.Text
+wcToText Star = "*"
+wcToText (StarDot wc) = "*." <> wcToText wc
+
+parseWildcard :: AT.Parser Wildcard
+parseWildcard =
+ fromList <$> ((starParser `AT.sepBy1` (AT.char '.')) <* AT.endOfInput)
+ where
+ starParser = AT.char '*' *> pure Star
+ fromList = foldr1 (\_ x -> StarDot x)
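+
+-- sepBy1 guarantees a non-empty list, so the foldr1 above is safe.
+-- Illustrative: AT.parseOnly parseWildcard "*.*" == Right (StarDot Star)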
+
+-- Columns in RQL
+data SelCol
+ = SCStar !Wildcard
+ | SCExtSimple !PGCol
+ | SCExtRel !RelName !(Maybe RelName) !SelectQ
+ deriving (Show, Eq, Lift)
+
+instance FromJSON SelCol where
+ parseJSON (String s) =
+ case AT.parseOnly parseWildcard s of
+ Left _ -> return $ SCExtSimple $ PGCol s
+ Right x -> return $ SCStar x
+ parseJSON v@(Object o) =
+ SCExtRel
+ <$> o .: "name"
+ <*> o .:? "alias"
+ <*> parseJSON v
+ parseJSON _ =
+ fail $ mconcat
+ [ "A column should either be a string or an "
+ , "object (relationship)"
+ ]
+
+instance ToJSON SelCol where
+ toJSON (SCStar wc) = String $ wcToText wc
+ toJSON (SCExtSimple s) = toJSON s
+ toJSON (SCExtRel rn mrn selq) =
+ object $ [ "name" .= rn
+ , "alias" .= mrn
+ ] ++ selectGToPairs selq
+
+type SelectQ = SelectG SelCol BoolExp
+
+type SelectQuery = DMLQuery SelectQ
+
+instance ToJSON SelectQuery where
+ toJSON (DMLQuery qt selQ) =
+ object $ "table" .= qt : selectGToPairs selQ
+
+type InsObj = M.HashMap PGCol Value
+
+data ConflictAction
+ = CAIgnore
+ | CAUpdate
+ deriving (Show, Eq, Lift)
+
+instance FromJSON ConflictAction where
+ parseJSON (String "ignore") = return CAIgnore
+ parseJSON (String "update") = return CAUpdate
+ parseJSON _ =
+ fail "Expecting 'ignore' or 'update'"
+
+instance ToJSON ConflictAction where
+ toJSON CAUpdate = String "update"
+ toJSON CAIgnore = String "ignore"
+
+newtype ConstraintOn
+ = ConstraintOn {getPGCols :: [PGCol]} deriving (Show, Eq, Lift, ToJSON)
+
+instance FromJSON ConstraintOn where
+ parseJSON v@(String _) =
+ ConstraintOn . (:[]) <$> parseJSON v
+ parseJSON v@(Array _) =
+ ConstraintOn <$> parseJSON v
+ parseJSON _ =
+ fail "Expecting a String or an Array"
+
+data OnConflict
+ = OnConflict
+ { ocConstraintOn :: !(Maybe ConstraintOn)
+ , ocConstraint :: !(Maybe ConstraintName)
+ , ocAction :: !ConflictAction
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''OnConflict)
+
+data InsertQuery
+ = InsertQuery
+ { iqTable :: !QualifiedTable
+ , iqObjects :: !Value
+ , iqOnConflict :: !(Maybe OnConflict)
+ , iqReturning :: !(Maybe [PGCol])
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''InsertQuery)
+
+type UpdVals = M.HashMap PGCol Value
+
+data UpdateQuery
+ = UpdateQuery
+ { uqTable :: !QualifiedTable
+ , uqWhere :: !BoolExp
+ , uqSet :: !UpdVals
+ , uqInc :: !UpdVals
+ , uqMul :: !UpdVals
+ , uqDefault :: ![PGCol]
+ , uqReturning :: !(Maybe [PGCol])
+ } deriving (Show, Eq, Lift)
+
+instance FromJSON UpdateQuery where
+ parseJSON (Object o) =
+ UpdateQuery
+ <$> o .: "table"
+ <*> o .: "where"
+ <*> ((o .: "$set" <|> o .:? "values") .!= M.empty)
+ <*> (o .:? "$inc" .!= M.empty)
+ <*> (o .:? "$mul" .!= M.empty)
+ <*> o .:? "$default" .!= []
+ <*> o .:? "returning"
+ parseJSON _ =
+ fail "Expecting an object for update query"
+
+instance ToJSON UpdateQuery where
+ toJSON (UpdateQuery tn wc setE incE mulE defE ret) =
+ object [ "table" .= tn
+ , "where" .= wc
+ , "$set" .= setE
+ , "$inc" .= incE
+ , "$mul" .= mulE
+ , "$default" .= defE
+ , "returning" .= ret
+ ]
+
+data DeleteQuery
+ = DeleteQuery
+ { doTable :: !QualifiedTable
+ , doWhere :: !BoolExp -- where clause
+ , doReturning :: !(Maybe [PGCol]) -- columns returning
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DeleteQuery)
+
+data CountQuery
+ = CountQuery
+ { cqTable :: !QualifiedTable
+ , cqDistinct :: !(Maybe [PGCol])
+ , cqWhere :: !(Maybe BoolExp)
+ } deriving (Show, Eq, Lift)
+
+$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''CountQuery)
+
+data QueryT
+ = QTInsert !InsertQuery
+ | QTSelect !SelectQuery
+ | QTUpdate !UpdateQuery
+ | QTDelete !DeleteQuery
+ | QTCount !CountQuery
+ | QTBulk ![QueryT]
+ deriving (Show, Eq, Lift)
+
+$(deriveJSON
+ defaultOptions { constructorTagModifier = snakeCase . drop 2
+ , sumEncoding = TaggedObject "type" "args"
+ }
+ ''QueryT)
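+
+-- With this encoding a query is a tagged object, e.g. (illustrative):
+-- {"type": "count", "args": {"table": "authors"}}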
diff --git a/server/src-lib/Hasura/RQL/Types/Error.hs b/server/src-lib/Hasura/RQL/Types/Error.hs
new file mode 100644
index 00000000..4ab5419b
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/Error.hs
@@ -0,0 +1,256 @@
+{-# LANGUAGE ConstraintKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.RQL.Types.Error
+ ( Code(..)
+ , QErr(..)
+ , encodeQErr
+ , nonAdminQErrEnc
+ , err400
+ , err401
+ , err500
+
+ , QErrM
+ , throw400
+ , throw404
+ , throw500
+ , throw401
+
+ -- Aeson helpers
+ , runAesonParser
+ , decodeValue
+
+ -- Modify error messages
+ , modifyErr
+ , modifyErrAndSet500
+
+ -- Attach context
+ , withPathK
+ , withPathI
+ , indexedFoldM
+ , indexedForM
+ , indexedForM_
+ ) where
+
+import Data.Aeson
+import Data.Aeson.Internal
+import Data.Aeson.Types
+import qualified Database.PG.Query as Q
+import Hasura.Prelude
+import Text.Show (Show (..))
+
+import qualified Data.Text as T
+import qualified Network.HTTP.Types as N
+
+data Code
+ = PermissionDenied
+ | NotNullViolation
+ | NotExists
+ | AlreadyExists
+ | PostgresError
+ | NotSupported
+ | DependencyError
+ | InvalidHeaders
+ | InvalidJSON
+ | AccessDenied
+ | ParseFailed
+ | ConstraintError
+ | PermissionError
+ | NotFound
+ | Unexpected
+ | UnexpectedPayload
+ | NoUpdate
+ | AlreadyTracked
+ | AlreadyUntracked
+ | InvalidParams
+ | AlreadyInit
+ -- GraphQL errors
+ | NoTables
+ | ValidationFailed
+ deriving (Eq)
+
+instance Show Code where
+ show NotNullViolation = "not-null-violation"
+ show PermissionDenied = "permission-denied"
+ show NotExists = "not-exists"
+ show AlreadyExists = "already-exists"
+ show AlreadyTracked = "already-tracked"
+ show AlreadyUntracked = "already-untracked"
+ show PostgresError = "postgres-error"
+ show NotSupported = "not-supported"
+ show DependencyError = "dependency-error"
+ show InvalidHeaders = "invalid-headers"
+ show InvalidJSON = "invalid-json"
+ show AccessDenied = "access-denied"
+ show ParseFailed = "parse-failed"
+ show ConstraintError = "constraint-error"
+ show PermissionError = "permission-error"
+ show NotFound = "not-found"
+ show Unexpected = "unexpected"
+ show UnexpectedPayload = "unexpected-payload"
+ show NoUpdate = "no-update"
+ show InvalidParams = "invalid-params"
+ show AlreadyInit = "already-initialised"
+ show NoTables = "no-tables"
+ show ValidationFailed = "validation-failed"
+
+data QErr
+ = QErr
+ { qePath :: !JSONPath
+ , qeStatus :: !N.Status
+ , qeError :: !T.Text
+ , qeCode :: !Code
+ , qeInternal :: !(Maybe Value)
+ } deriving (Show, Eq)
+
+instance ToJSON QErr where
+ toJSON (QErr jPath _ msg code Nothing) =
+ object
+ [ "path" .= encodeJSONPath jPath
+ , "error" .= msg
+ , "code" .= show code
+ ]
+ toJSON (QErr jPath _ msg code (Just ie)) =
+ object
+ [ "path" .= encodeJSONPath jPath
+ , "error" .= msg
+ , "code" .= show code
+ , "internal" .= ie
+ ]
+
+nonAdminQErrEnc :: QErr -> Value
+nonAdminQErrEnc (QErr jPath _ msg code _) =
+ object
+ [ "path" .= encodeJSONPath jPath
+ , "error" .= msg
+ , "code" .= show code
+ ]
+
+encodeQErr :: T.Text -> QErr -> Value
+encodeQErr "admin" = toJSON
+encodeQErr _ = nonAdminQErrEnc
+
+encodeJSONPath :: JSONPath -> String
+encodeJSONPath = format "$"
+ where
+ format pfx [] = pfx
+ format pfx (Index idx:parts) = format (pfx ++ "[" ++ show idx ++ "]") parts
+ format pfx (Key key:parts) = format (pfx ++ formatKey key) parts
+
+ -- keys containing a dot are bracketed instead of dotted, so they no
+ -- longer pick up a spurious leading '.'
+ formatKey key
+ | T.any (=='.') key = "['" ++ T.unpack key ++ "']"
+ | otherwise = "." ++ T.unpack key
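+
+-- Illustrative: encodeJSONPath [Key "user", Index 0, Key "a.b"]
+-- == "$.user[0]['a.b']"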
+
+instance Q.FromPGConnErr QErr where
+ fromPGConnErr c =
+ let e = err500 PostgresError "connection error"
+ in e {qeInternal = Just $ toJSON c}
+
+instance Q.FromPGTxErr QErr where
+ fromPGTxErr txe =
+ let e = err500 PostgresError "postgres tx error"
+ in e {qeInternal = Just $ toJSON txe}
+
+err400 :: Code -> T.Text -> QErr
+err400 c t = QErr [] N.status400 t c Nothing
+
+err404 :: Code -> T.Text -> QErr
+err404 c t = QErr [] N.status404 t c Nothing
+
+err401 :: Code -> T.Text -> QErr
+err401 c t = QErr [] N.status401 t c Nothing
+
+err500 :: Code -> T.Text -> QErr
+err500 c t = QErr [] N.status500 t c Nothing
+
+type QErrM m = (MonadError QErr m)
+
+throw400 :: (QErrM m) => Code -> T.Text -> m a
+throw400 c t = throwError $ err400 c t
+
+throw404 :: (QErrM m) => Code -> T.Text -> m a
+throw404 c t = throwError $ err404 c t
+
+throw401 :: (QErrM m) => T.Text -> m a
+throw401 t = throwError $ err401 AccessDenied t
+
+throw500 :: (QErrM m) => T.Text -> m a
+throw500 t = throwError $ err500 Unexpected t
+
+modifyQErr :: (QErrM m)
+ => (QErr -> QErr) -> m a -> m a
+modifyQErr f a = catchError a (throwError . f)
+
+modifyErr :: (QErrM m)
+ => (T.Text -> T.Text)
+ -> m a -> m a
+modifyErr f = modifyQErr (liftTxtMod f)
+
+liftTxtMod :: (T.Text -> T.Text) -> QErr -> QErr
+liftTxtMod f (QErr path st s c i) = QErr path st (f s) c i
+
+modifyErrAndSet500 :: (QErrM m)
+ => (T.Text -> T.Text)
+ -> m a -> m a
+modifyErrAndSet500 f = modifyQErr (liftTxtMod500 f)
+
+liftTxtMod500 :: (T.Text -> T.Text) -> QErr -> QErr
+liftTxtMod500 f (QErr path _ s c i) = QErr path N.status500 (f s) c i
+
+withPathE :: (QErrM m)
+ => JSONPathElement -> m a -> m a
+withPathE pe m =
+ catchError m (throwError . injectPrefix)
+ where
+ injectPrefix (QErr path st msg code i) = QErr (pe:path) st msg code i
+
+withPathK :: (QErrM m)
+ => T.Text -> m a -> m a
+withPathK = withPathE . Key
+
+withPathI :: (QErrM m)
+ => Int -> m a -> m a
+withPathI = withPathE . Index
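+
+-- Illustrative: withPathK "user" (withPathI 2 (throw400 ParseFailed "msg"))
+-- throws a QErr whose path renders as "$.user[2]".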
+
+indexedFoldM :: (QErrM m)
+ => (b -> a -> m b)
+ -> b -> [a] -> m b
+indexedFoldM f b al =
+ foldM f' b $ zip [0..] al
+ where
+ f' accum (i, a) = withPathE (Index i) (f accum a)
+
+indexedForM :: (QErrM m)
+ => [a] -> (a -> m b) -> m [b]
+indexedForM l f =
+ forM (zip [0..] l) $ \(i, a) ->
+ withPathE (Index i) (f a)
+
+indexedForM_ :: (QErrM m)
+ => [a] -> (a -> m ()) -> m ()
+indexedForM_ l f =
+ forM_ (zip [0..] l) $ \(i, a) ->
+ withPathE (Index i) (f a)
+
+liftIResult :: (QErrM m) => IResult a -> m a
+liftIResult (IError path msg) =
+ throwError $ QErr path N.status400 (T.pack $ formatMsg msg) ParseFailed Nothing
+liftIResult (ISuccess a) =
+ return a
+
+formatMsg :: String -> String
+formatMsg str = case T.splitOn "the key " txt of
+ [_, txt2] -> case T.splitOn " was not present" txt2 of
+ [key, _] -> "the key '" ++ T.unpack key ++ "' was not present"
+ _ -> str
+ _ -> str
+ where
+ txt = T.pack str
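+
+-- Illustrative: formatMsg "Error in $: the key name was not present"
+-- == "the key 'name' was not present"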
+
+runAesonParser :: (QErrM m) => (Value -> Parser a) -> Value -> m a
+runAesonParser p =
+ liftIResult . iparse p
+
+decodeValue :: (FromJSON a, QErrM m) => Value -> m a
+decodeValue = liftIResult . ifromJSON
diff --git a/server/src-lib/Hasura/RQL/Types/Permission.hs b/server/src-lib/Hasura/RQL/Types/Permission.hs
new file mode 100644
index 00000000..a262c93f
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/Permission.hs
@@ -0,0 +1,110 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.RQL.Types.Permission
+ ( RoleName(..)
+ , UserId(..)
+ , UserInfo(..)
+ , adminUserInfo
+ , adminRole
+ , PermType(..)
+ , permTypeToCode
+ , PermId(..)
+ ) where
+
+import Hasura.SQL.Types
+import Hasura.Prelude
+
+import qualified Database.PG.Query as Q
+
+import Data.Aeson
+import Data.Hashable
+import Data.Word
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.Text as T
+import qualified PostgreSQL.Binary.Decoding as PD
+
+newtype RoleName
+ = RoleName {getRoleTxt :: T.Text}
+ deriving ( Show, Eq, Hashable, FromJSONKey, ToJSONKey, FromJSON
+ , ToJSON, Q.FromCol, Q.ToPrepArg, Lift)
+
+instance DQuote RoleName where
+ dquoteTxt (RoleName r) = r
+
+adminRole :: RoleName
+adminRole = RoleName "admin"
+
+newtype UserId = UserId { getUserId :: Word64 }
+ deriving (Show, Eq, FromJSON, ToJSON)
+
+data UserInfo
+ = UserInfo
+ { userRole :: !RoleName
+ , userHeaders :: ![(T.Text, T.Text)]
+ } deriving (Show, Eq)
+
+adminUserInfo :: UserInfo
+adminUserInfo = UserInfo adminRole [("X-Hasura-User-Id", "0")]
+
+data PermType
+ = PTInsert
+ | PTSelect
+ | PTUpdate
+ | PTDelete
+ deriving (Eq, Lift)
+
+instance Q.FromCol PermType where
+ fromCol bs = flip Q.fromColHelper bs $ PD.enum $ \case
+ "insert" -> Just PTInsert
+ "update" -> Just PTUpdate
+ "select" -> Just PTSelect
+ "delete" -> Just PTDelete
+ _ -> Nothing
+
+permTypeToCode :: PermType -> T.Text
+permTypeToCode PTInsert = "insert"
+permTypeToCode PTSelect = "select"
+permTypeToCode PTUpdate = "update"
+permTypeToCode PTDelete = "delete"
+
+instance Hashable PermType where
+ hashWithSalt salt a = hashWithSalt salt $ permTypeToCode a
+
+instance Show PermType where
+ show PTInsert = "insert"
+ show PTSelect = "select"
+ show PTUpdate = "update"
+ show PTDelete = "delete"
+
+instance FromJSON PermType where
+ parseJSON (String "insert") = return PTInsert
+ parseJSON (String "select") = return PTSelect
+ parseJSON (String "update") = return PTUpdate
+ parseJSON (String "delete") = return PTDelete
+ parseJSON _ =
+ fail "perm_type should be one of 'insert', 'select', 'update', 'delete'"
+
+instance ToJSON PermType where
+ toJSON = String . permTypeToCode
+
+data PermId
+ = PermId
+ { pidTable :: !TableName
+ , pidRole :: !RoleName
+ , pidType :: !PermType
+ } deriving (Eq)
+
+instance Show PermId where
+ show (PermId tn rn pType) =
+ show $ mconcat
+ [ getTableTxt tn
+ , "."
+ , getRoleTxt rn
+ , "."
+ , T.pack $ show pType
+ ]
diff --git a/server/src-lib/Hasura/RQL/Types/SchemaCache.hs b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs
new file mode 100644
index 00000000..184ac007
--- /dev/null
+++ b/server/src-lib/Hasura/RQL/Types/SchemaCache.hs
@@ -0,0 +1,592 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE GADTs #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE RankNTypes #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.RQL.Types.SchemaCache
+ ( TableCache
+ , SchemaCache(..)
+ , emptySchemaCache
+ , TableInfo(..)
+ , mkTableInfo
+ , addTableToCache
+ , modTableInCache
+ , delTableFromCache
+
+ , CacheRM(..)
+ , CacheRWM(..)
+
+ , FieldInfoMap
+ , FieldInfo(..)
+ , fieldInfoToEither
+ , partitionFieldInfos
+ , partitionFieldInfosWith
+ , getCols
+ , getRels
+
+ , PGColInfo(..)
+ , isPGColInfo
+ , RelInfo(..)
+ , addFldToCache
+ , delFldFromCache
+
+ , RolePermInfo(..)
+ , permIns
+ , permSel
+ , permUpd
+ , permDel
+ , PermAccessor(..)
+ , permAccToLens
+ , permAccToType
+ , withPermType
+ , RolePermInfoMap
+ , InsPermInfo(..)
+ , SelPermInfo(..)
+ , UpdPermInfo(..)
+ , DelPermInfo(..)
+ , addPermToCache
+ , delPermFromCache
+
+ , QueryTemplateInfo(..)
+ , addQTemplateToCache
+ , delQTemplateFromCache
+ , TemplateParamInfo(..)
+
+ , TableObjId(..)
+ , SchemaObjId(..)
+ , reportSchemaObj
+ , reportSchemaObjs
+ , SchemaDependency(..)
+ , mkParentDep
+ , mkColDep
+ , getDependentObjs
+ , getDependentObjsWith
+ , getDependentObjsOfTable
+ , getDependentObjsOfQTemplateCache
+ , getDependentPermsOfTable
+ , getDependentRelsOfTable
+ , isDependentOn
+ ) where
+
+import Hasura.RQL.Types.Common
+import Hasura.RQL.Types.DML
+import Hasura.RQL.Types.Error
+import Hasura.RQL.Types.Permission
+import Hasura.Prelude
+import qualified Hasura.SQL.DML as S
+import Hasura.SQL.Types
+
+import Control.Lens
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import GHC.Generics (Generic)
+
+import qualified Data.HashMap.Strict as M
+import qualified Data.HashSet as HS
+import qualified Data.Text as T
+
+data TableObjId
+ = TOCol !PGCol
+ | TORel !RelName
+ | TOCons !ConstraintName
+ | TOPerm !RoleName !PermType
+ deriving (Show, Eq, Generic)
+
+instance Hashable TableObjId
+
+data SchemaObjId
+ = SOTable !QualifiedTable
+ | SOQTemplate !TQueryName
+ | SOTableObj !QualifiedTable !TableObjId
+ deriving (Eq, Generic)
+
+instance Hashable SchemaObjId
+
+reportSchemaObj :: SchemaObjId -> T.Text
+reportSchemaObj (SOTable tn) = "table " <> qualTableToTxt tn
+reportSchemaObj (SOQTemplate qtn) =
+ "query-template " <> getTQueryName qtn
+reportSchemaObj (SOTableObj tn (TOCol cn)) =
+ "column " <> qualTableToTxt tn <> "." <> getPGColTxt cn
+reportSchemaObj (SOTableObj tn (TORel cn)) =
+ "relationship " <> qualTableToTxt tn <> "." <> getRelTxt cn
+reportSchemaObj (SOTableObj tn (TOCons cn)) =
+ "constraint " <> qualTableToTxt tn <> "." <> getConstraintTxt cn
+reportSchemaObj (SOTableObj tn (TOPerm rn pt)) =
+ "permission " <> qualTableToTxt tn <> "." <> getRoleTxt rn
+ <> "." <> permTypeToCode pt
+
+reportSchemaObjs :: [SchemaObjId] -> T.Text
+reportSchemaObjs = T.intercalate ", " . map reportSchemaObj
+
+instance Show SchemaObjId where
+ show soi = T.unpack $ reportSchemaObj soi
+
+instance ToJSON SchemaObjId where
+ toJSON = String . reportSchemaObj
+
+data SchemaDependency
+ = SchemaDependency
+ { sdObjId :: !SchemaObjId
+ , sdReason :: !T.Text
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaDependency)
+
+mkParentDep :: QualifiedTable -> SchemaDependency
+mkParentDep tn = SchemaDependency (SOTable tn) "table"
+
+mkColDep :: T.Text -> QualifiedTable -> PGCol -> SchemaDependency
+mkColDep reason tn col =
+ flip SchemaDependency reason . SOTableObj tn $ TOCol col
+
+class CachedSchemaObj a where
+ dependsOn :: a -> [SchemaDependency]
+
+isDependentOn :: (CachedSchemaObj a) => (T.Text -> Bool) -> SchemaObjId -> a -> Bool
+isDependentOn reasonFn objId = any compareFn . dependsOn
+ where
+ compareFn (SchemaDependency depObjId rsn) = induces objId depObjId && reasonFn rsn
+ induces (SOTable tn1) (SOTable tn2) = tn1 == tn2
+ induces (SOTable tn1) (SOTableObj tn2 _) = tn1 == tn2
+ induces objId1 objId2 = objId1 == objId2
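+
+-- A table-level id "induces" every object under that table, so dropping a
+-- table also flags dependents of its columns, relationships and permissions.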
+
+data QueryTemplateInfo
+ = QueryTemplateInfo
+ { qtiName :: !TQueryName
+ , qtiQuery :: !QueryT
+ , qtiDeps :: ![SchemaDependency]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''QueryTemplateInfo)
+
+instance CachedSchemaObj QueryTemplateInfo where
+ dependsOn = qtiDeps
+
+type QTemplateCache = M.HashMap TQueryName QueryTemplateInfo
+
+data PGColInfo
+ = PGColInfo
+ { pgiName :: !PGCol
+ , pgiType :: !PGColType
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColInfo)
+
+data RelInfo
+ = RelInfo
+ { riName :: !RelName
+ , riType :: !RelType
+ , riMapping :: ![(PGCol, PGCol)]
+ , riRTable :: !QualifiedTable
+ , riDeps :: ![SchemaDependency]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
+
+instance CachedSchemaObj RelInfo where
+ dependsOn = riDeps
+
+data FieldInfo
+ = FIColumn !PGColInfo
+ | FIRelationship !RelInfo
+ deriving (Show, Eq)
+
+$(deriveToJSON
+ defaultOptions { constructorTagModifier = snakeCase . drop 2
+ , sumEncoding = TaggedObject "type" "detail"
+ }
+ ''FieldInfo)
+
+fieldInfoToEither :: FieldInfo -> Either PGColInfo RelInfo
+fieldInfoToEither (FIColumn l) = Left l
+fieldInfoToEither (FIRelationship r) = Right r
+
+partitionFieldInfos :: [FieldInfo] -> ([PGColInfo], [RelInfo])
+partitionFieldInfos = partitionFieldInfosWith (id, id)
+
+partitionFieldInfosWith :: (PGColInfo -> a, RelInfo -> b)
+ -> [FieldInfo] -> ([a], [b])
+partitionFieldInfosWith fns =
+ partitionEithers . map (biMapEither fns . fieldInfoToEither)
+ where
+ biMapEither (f1, f2) = either (Left . f1) (Right . f2)
+
+type FieldInfoMap = M.HashMap FieldName FieldInfo
+
+getCols :: FieldInfoMap -> [PGColInfo]
+getCols fim = lefts $ map fieldInfoToEither $ M.elems fim
+
+getRels :: FieldInfoMap -> [RelInfo]
+getRels fim = rights $ map fieldInfoToEither $ M.elems fim
+
+isPGColInfo :: FieldInfo -> Bool
+isPGColInfo (FIColumn _) = True
+isPGColInfo _ = False
+
+instance ToJSON S.BoolExp where
+ toJSON = String . T.pack . show
+
+data InsPermInfo
+ = InsPermInfo
+ { ipiView :: !QualifiedTable
+ , ipiCheck :: !S.BoolExp
+ , ipiAllowUpsert :: !Bool
+ , ipiDeps :: ![SchemaDependency]
+ , ipiRequiredHeaders :: ![T.Text]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''InsPermInfo)
+
+instance CachedSchemaObj InsPermInfo where
+ dependsOn = ipiDeps
+
+data SelPermInfo
+ = SelPermInfo
+ { spiCols :: !(HS.HashSet PGCol)
+ , spiTable :: !QualifiedTable
+ , spiFilter :: !S.BoolExp
+ , spiDeps :: ![SchemaDependency]
+ , spiRequiredHeaders :: ![T.Text]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''SelPermInfo)
+
+instance CachedSchemaObj SelPermInfo where
+ dependsOn = spiDeps
+
+data UpdPermInfo
+ = UpdPermInfo
+ { upiCols :: !(HS.HashSet PGCol)
+ , upiTable :: !QualifiedTable
+ , upiFilter :: !S.BoolExp
+ , upiDeps :: ![SchemaDependency]
+ , upiRequiredHeaders :: ![T.Text]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''UpdPermInfo)
+
+instance CachedSchemaObj UpdPermInfo where
+ dependsOn = upiDeps
+
+data DelPermInfo
+ = DelPermInfo
+ { dpiTable :: !QualifiedTable
+ , dpiFilter :: !S.BoolExp
+ , dpiDeps :: ![SchemaDependency]
+ , dpiRequiredHeaders :: ![T.Text]
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 3 snakeCase) ''DelPermInfo)
+
+instance CachedSchemaObj DelPermInfo where
+ dependsOn = dpiDeps
+
+mkRolePermInfo :: RolePermInfo
+mkRolePermInfo = RolePermInfo Nothing Nothing Nothing Nothing
+
+data RolePermInfo
+ = RolePermInfo
+ { _permIns :: !(Maybe InsPermInfo)
+ , _permSel :: !(Maybe SelPermInfo)
+ , _permUpd :: !(Maybe UpdPermInfo)
+ , _permDel :: !(Maybe DelPermInfo)
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 5 snakeCase) ''RolePermInfo)
+makeLenses ''RolePermInfo
+
+type RolePermInfoMap = M.HashMap RoleName RolePermInfo
+
+data TableInfo
+ = TableInfo
+ { tiName :: !QualifiedTable
+ , tiFieldInfoMap :: !FieldInfoMap
+ , tiRolePermInfoMap :: !RolePermInfoMap
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 2 snakeCase) ''TableInfo)
+
+mkTableInfo :: QualifiedTable -> [(PGCol, PGColType)] -> TableInfo
+mkTableInfo tn cols =
+ TableInfo tn colMap $ M.fromList []
+ where
+ colMap = M.fromList $ map f cols
+ f (cn, ct) = (fromPGCol cn, FIColumn $ PGColInfo cn ct)
+
+type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables
+
+data SchemaCache
+ = SchemaCache
+ { scTables :: !TableCache
+ , scQTemplates :: !QTemplateCache
+ } deriving (Show, Eq)
+
+$(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaCache)
+
+class (Monad m) => CacheRM m where
+
+ -- Get the schema cache
+ askSchemaCache :: m SchemaCache
+
+instance (Monad m) => CacheRM (StateT SchemaCache m) where
+ askSchemaCache = get
+
+class (CacheRM m) => CacheRWM m where
+
+ -- Replace the schema cache
+ writeSchemaCache :: SchemaCache -> m ()
+
+instance (Monad m) => CacheRWM (StateT SchemaCache m) where
+ writeSchemaCache = put
+
+addQTemplateToCache :: (QErrM m, CacheRWM m)
+ => QueryTemplateInfo -> m ()
+addQTemplateToCache qti = do
+ sc <- askSchemaCache
+ let templateCache = scQTemplates sc
+ case M.lookup qtn templateCache of
+ Just _ -> throw500 $ "template already exists in cache " <>> qtn
+ Nothing -> do
+ let newTemplateCache = M.insert qtn qti templateCache
+ writeSchemaCache $ sc {scQTemplates = newTemplateCache}
+ where
+ qtn = qtiName qti
+
+delQTemplateFromCache :: (QErrM m, CacheRWM m)
+ => TQueryName -> m ()
+delQTemplateFromCache qtn = do
+ sc <- askSchemaCache
+ let templateCache = scQTemplates sc
+ case M.lookup qtn templateCache of
+ Nothing -> throw500 $ "template does not exist in cache " <>> qtn
+ Just _ -> do
+ let newTemplateCache = M.delete qtn templateCache
+ writeSchemaCache $ sc {scQTemplates = newTemplateCache}
+
+emptySchemaCache :: SchemaCache
+emptySchemaCache = SchemaCache (M.fromList []) (M.fromList [])
+
+modTableCache :: (CacheRWM m) => TableCache -> m ()
+modTableCache tc = do
+ sc <- askSchemaCache
+ writeSchemaCache $ sc { scTables = tc }
+
+addTableToCache :: (QErrM m, CacheRWM m)
+ => TableInfo -> m ()
+addTableToCache ti = do
+ sc <- askSchemaCache
+ assertTableNotExists tn sc
+ modTableCache $ M.insert tn ti $ scTables sc
+ where
+ tn = tiName ti
+
+delTableFromCache :: (QErrM m, CacheRWM m)
+ => QualifiedTable -> m ()
+delTableFromCache tn = do
+ sc <- askSchemaCache
+ void $ getTableInfoFromCache tn sc
+ modTableCache $ M.delete tn $ scTables sc
+
+getTableInfoFromCache :: (QErrM m)
+ => QualifiedTable
+ -> SchemaCache
+ -> m TableInfo
+getTableInfoFromCache tn sc =
+ case M.lookup tn (scTables sc) of
+ Nothing -> throw500 $ "table not found in cache : " <>> tn
+ Just ti -> return ti
+
+assertTableNotExists :: (QErrM m)
+ => QualifiedTable
+ -> SchemaCache
+ -> m ()
+assertTableNotExists tn sc =
+ case M.lookup tn (scTables sc) of
+ Nothing -> return ()
+ Just _ -> throw500 $ "table exists in cache : " <>> tn
+
+modTableInCache :: (QErrM m, CacheRWM m)
+ => (TableInfo -> m TableInfo)
+ -> QualifiedTable
+ -> m ()
+modTableInCache f tn = do
+ sc <- askSchemaCache
+ ti <- getTableInfoFromCache tn sc
+ newTi <- f ti
+ modTableCache $ M.insert tn newTi $ scTables sc
+
+addFldToCache :: (QErrM m, CacheRWM m)
+ => FieldName -> FieldInfo
+ -> QualifiedTable -> m ()
+addFldToCache fn fi =
+ modTableInCache modFieldInfoMap
+ where
+ modFieldInfoMap ti = do
+ let fim = tiFieldInfoMap ti
+ case M.lookup fn fim of
+ Just _ -> throw500 "field already exists "
+ Nothing -> return $
+ ti { tiFieldInfoMap = M.insert fn fi fim }
+
+delFldFromCache :: (QErrM m, CacheRWM m)
+ => FieldName -> QualifiedTable -> m ()
+delFldFromCache fn =
+ modTableInCache modFieldInfoMap
+ where
+ modFieldInfoMap ti = do
+ let fim = tiFieldInfoMap ti
+ case M.lookup fn fim of
+ Just _ -> return $
+ ti { tiFieldInfoMap = M.delete fn fim }
+ Nothing -> throw500 "field does not exist"
+data PermAccessor a where
+ PAInsert :: PermAccessor InsPermInfo
+ PASelect :: PermAccessor SelPermInfo
+ PAUpdate :: PermAccessor UpdPermInfo
+ PADelete :: PermAccessor DelPermInfo
+
+permAccToLens :: PermAccessor a -> Lens' RolePermInfo (Maybe a)
+permAccToLens PAInsert = permIns
+permAccToLens PASelect = permSel
+permAccToLens PAUpdate = permUpd
+permAccToLens PADelete = permDel
+
+permAccToType :: PermAccessor a -> PermType
+permAccToType PAInsert = PTInsert
+permAccToType PASelect = PTSelect
+permAccToType PAUpdate = PTUpdate
+permAccToType PADelete = PTDelete
+
+withPermType :: PermType -> (forall a. PermAccessor a -> b) -> b
+withPermType PTInsert f = f PAInsert
+withPermType PTSelect f = f PASelect
+withPermType PTUpdate f = f PAUpdate
+withPermType PTDelete f = f PADelete
+
+addPermToCache
+ :: (QErrM m, CacheRWM m)
+ => QualifiedTable
+ -> RoleName
+ -> PermAccessor a
+ -> a
+ -> m ()
+addPermToCache tn rn pa i =
+ modTableInCache modRolePermInfo tn
+ where
+ paL = permAccToLens pa
+ modRolePermInfo ti = do
+ let rpim = tiRolePermInfoMap ti
+ rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
+ newRPI = rpi & paL .~ Just i
+ assertPermNotExists pa rpi
+ return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
+
+assertPermNotExists
+ :: (QErrM m)
+ => PermAccessor a
+ -> RolePermInfo -> m ()
+assertPermNotExists f rpi =
+ when (isJust $ rpi ^. permAccToLens f) $ throw500 "permission exists"
+
+assertPermExists
+ :: (QErrM m)
+ => PermAccessor a
+ -> RolePermInfo -> m ()
+assertPermExists f rpi =
+ unless (isJust $ rpi ^. permAccToLens f) $ throw500 "permission does not exist"
+
+delPermFromCache
+ :: (QErrM m, CacheRWM m)
+ => PermAccessor a
+ -> RoleName
+ -> QualifiedTable
+ -> m ()
+delPermFromCache pa rn =
+ modTableInCache modRolePermInfo
+ where
+ paL = permAccToLens pa
+ modRolePermInfo ti = do
+ let rpim = tiRolePermInfoMap ti
+ rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
+ assertPermExists pa rpi
+ let newRPI = rpi & paL .~ Nothing
+ return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
+
+data TemplateParamInfo
+ = TemplateParamInfo
+ { tpiName :: !TemplateParam
+ , tpiDefault :: !(Maybe Value)
+ } deriving (Show, Eq)
+
+getDependentObjs :: SchemaCache -> SchemaObjId -> [SchemaObjId]
+getDependentObjs = getDependentObjsWith (const True)
+
+getDependentObjsWith :: (T.Text -> Bool) -> SchemaCache -> SchemaObjId -> [SchemaObjId]
+getDependentObjsWith f sc objId =
+ HS.toList $ getDependentObjsRWith f HS.empty sc objId
+
+getDependentObjsRWith :: (T.Text -> Bool)
+ -> HS.HashSet SchemaObjId
+ -> SchemaCache -> SchemaObjId
+ -> HS.HashSet SchemaObjId
+getDependentObjsRWith f visited sc objId =
+ foldr go visited thisLevelDeps
+ where
+ thisLevelDeps = concatMap (getDependentObjsOfTableWith f objId) (scTables sc)
+ <> getDependentObjsOfQTemplateCache objId (scQTemplates sc)
+ go lObjId vis =
+ if HS.member lObjId vis
+ then vis
+ else getDependentObjsRWith f (HS.insert lObjId vis) sc lObjId
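+
+-- Depth-first transitive closure: each dependent is added to the visited
+-- set before recursing, so cyclic dependencies terminate.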
+
+getDependentObjsOfQTemplateCache :: SchemaObjId -> QTemplateCache -> [SchemaObjId]
+getDependentObjsOfQTemplateCache objId qtc =
+ map (SOQTemplate . qtiName) $ filter (isDependentOn (const True) objId) $
+ M.elems qtc
+
+getDependentObjsOfTable :: SchemaObjId -> TableInfo -> [SchemaObjId]
+getDependentObjsOfTable objId ti =
+ rels ++ perms
+ where
+ rels = getDependentRelsOfTable (const True) objId ti
+ perms = getDependentPermsOfTable (const True) objId ti
+
+getDependentObjsOfTableWith :: (T.Text -> Bool) -> SchemaObjId -> TableInfo -> [SchemaObjId]
+getDependentObjsOfTableWith f objId ti =
+ rels ++ perms
+ where
+ rels = getDependentRelsOfTable f objId ti
+ perms = getDependentPermsOfTable f objId ti
+
+getDependentRelsOfTable :: (T.Text -> Bool) -> SchemaObjId
+ -> TableInfo -> [SchemaObjId]
+getDependentRelsOfTable rsnFn objId (TableInfo tn fim _) =
+ map (SOTableObj tn . TORel . riName) $
+ filter (isDependentOn rsnFn objId) $ getRels fim
+
+getDependentPermsOfTable :: (T.Text -> Bool) -> SchemaObjId
+ -> TableInfo -> [SchemaObjId]
+getDependentPermsOfTable rsnFn objId (TableInfo tn _ rpim) =
+ concat $ flip M.mapWithKey rpim $
+ \rn rpi -> map (SOTableObj tn . TOPerm rn) $ getDependentPerms' rsnFn objId rpi
+
+getDependentPerms' :: (T.Text -> Bool) -> SchemaObjId -> RolePermInfo -> [PermType]
+getDependentPerms' rsnFn objId (RolePermInfo mipi mspi mupi mdpi) =
+ mapMaybe join
+ [ forM mipi $ toPermRow PTInsert
+ , forM mspi $ toPermRow PTSelect
+ , forM mupi $ toPermRow PTUpdate
+ , forM mdpi $ toPermRow PTDelete
+ ]
+ where
+ toPermRow :: forall a. (CachedSchemaObj a) => PermType -> a -> Maybe PermType
+ toPermRow pt =
+ bool Nothing (Just pt) . isDependentOn rsnFn objId
diff --git a/server/src-lib/Hasura/SQL/DML.hs b/server/src-lib/Hasura/SQL/DML.hs
new file mode 100644
index 00000000..fe2b1542
--- /dev/null
+++ b/server/src-lib/Hasura/SQL/DML.hs
@@ -0,0 +1,572 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.SQL.DML where
+
+import Hasura.Prelude
+import Hasura.SQL.Types
+
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Extended as T
+
+infixr 6 <->
+(<->) :: BB.Builder -> BB.Builder -> BB.Builder
+(<->) l r = l <> (BB.char7 ' ') <> r
+{-# INLINE (<->) #-}
+
+paren :: BB.Builder -> BB.Builder
+paren t = BB.char7 '(' <> t <> BB.char7 ')'
+{-# INLINE paren #-}
+
+data Select
+ = Select
+ { selDistinct :: !(Maybe DistinctExpr)
+ , selExtr :: ![Extractor]
+ , selFrom :: !(Maybe FromExp)
+ , selWhere :: !(Maybe WhereFrag)
+ , selGroupBy :: !(Maybe GroupByExp)
+ , selHaving :: !(Maybe HavingExp)
+ , selOrderBy :: !(Maybe OrderByExp)
+ , selLimit :: !(Maybe LimitExp)
+ , selOffset :: !(Maybe OffsetExp)
+ } deriving (Show, Eq)
+
+mkSelect :: Select
+mkSelect = Select Nothing [] Nothing
+ Nothing Nothing Nothing
+ Nothing Nothing Nothing
+
+newtype LimitExp
+ = LimitExp SQLExp
+ deriving (Show, Eq)
+
+instance ToSQL LimitExp where
+ toSQL (LimitExp se) =
+ BB.string7 "LIMIT" <-> toSQL se
+
+newtype OffsetExp
+ = OffsetExp SQLExp
+ deriving (Show, Eq)
+
+instance ToSQL OffsetExp where
+ toSQL (OffsetExp se) =
+ BB.string7 "OFFSET" <-> toSQL se
+
+newtype OrderByExp
+ = OrderByExp [OrderByItem]
+ deriving (Show, Eq)
+
+data OrderByItem
+ = OrderByItem
+ { oColumn :: !(Either PGCol QIden)
+ , oType :: !(Maybe OrderType)
+ , oNulls :: !(Maybe NullsOrder)
+ } deriving (Show, Eq)
+
+instance ToSQL OrderByItem where
+ toSQL (OrderByItem col ot no) =
+ either toSQL toSQL col <-> toSQL ot <-> toSQL no
+
+data OrderType = OTAsc
+ | OTDesc
+ deriving (Show, Eq, Lift)
+
+instance ToSQL OrderType where
+ toSQL OTAsc = BB.string7 "ASC"
+ toSQL OTDesc = BB.string7 "DESC"
+
+data NullsOrder
+ = NFirst
+ | NLast
+ deriving (Show, Eq, Lift)
+
+instance ToSQL NullsOrder where
+ toSQL NFirst = BB.string7 "NULLS FIRST"
+ toSQL NLast = BB.string7 "NULLS LAST"
+
+instance ToSQL OrderByExp where
+ toSQL (OrderByExp l) =
+ BB.string7 "ORDER BY" <-> (", " <+> l)
+
+newtype GroupByExp
+ = GroupByExp [SQLExp]
+ deriving (Show, Eq)
+
+instance ToSQL GroupByExp where
+ toSQL (GroupByExp idens) =
+ BB.string7 "GROUP BY" <-> (", " <+> idens)
+
+newtype FromExp
+ = FromExp [FromItem]
+ deriving (Show, Eq)
+
+instance ToSQL FromExp where
+ toSQL (FromExp items) =
+ BB.string7 "FROM" <-> (", " <+> items)
+
+mkIdenFromExp :: (IsIden a) => a -> FromExp
+mkIdenFromExp a =
+ FromExp [FIIden $ toIden a]
+
+mkSimpleFromExp :: QualifiedTable -> FromExp
+mkSimpleFromExp qt =
+ FromExp [FISimple qt Nothing]
+
+mkSelFromExp :: Bool -> Select -> TableName -> FromItem
+mkSelFromExp isLateral sel tn =
+ FISelect (Lateral isLateral) sel alias
+ where
+ alias = Alias $ toIden tn
+
+mkRowExp :: [(T.Text, SQLExp)] -> SQLExp
+mkRowExp extrs =
+ SEFnApp "json_build_object" args Nothing
+ where
+ args = concat [[SELit t, r] | (t, r) <- extrs]
+
+ -- let
+ -- innerSel = mkSelect { selExtr = extrs }
+
+ -- innerSelName = TableName "e"
+
+ -- -- SELECT r FROM (SELECT col1, col2, .. ) AS r
+ -- outerSel = mkSelect
+ -- { selExtr = [mkExtr innerSelName]
+ -- , selFrom = Just $ FromExp
+ -- [mkSelFromExp False innerSel innerSelName]
+ -- }
+ -- in
+ -- SESelect outerSel
+
+newtype HavingExp
+ = HavingExp BoolExp
+ deriving (Show, Eq)
+
+instance ToSQL HavingExp where
+ toSQL (HavingExp be) =
+ BB.string7 "HAVING" <-> toSQL be
+
+newtype WhereFrag
+ = WhereFrag { getWFBoolExp :: BoolExp }
+ deriving (Show, Eq)
+
+instance ToSQL WhereFrag where
+ toSQL (WhereFrag be) =
+ BB.string7 "WHERE" <-> paren (toSQL be)
+
+instance ToSQL Select where
+ toSQL sel =
+ BB.string7 "SELECT"
+ <-> (toSQL $ selDistinct sel)
+ <-> (", " <+> selExtr sel)
+ <-> (toSQL $ selFrom sel)
+ <-> (toSQL $ selWhere sel)
+ <-> (toSQL $ selGroupBy sel)
+ <-> (toSQL $ selHaving sel)
+ <-> (toSQL $ selOrderBy sel)
+ <-> (toSQL $ selLimit sel)
+ <-> (toSQL $ selOffset sel)
+
+mkSIdenExp :: (IsIden a) => a -> SQLExp
+mkSIdenExp = SEIden . toIden
+
+mkQIdenExp :: (IsIden a, IsIden b) => a -> b -> SQLExp
+mkQIdenExp q t = SEQIden $ mkQIden q t
+
+data Qual
+ = QualIden !Iden
+ | QualTable !QualifiedTable
+ | QualVar !T.Text
+ deriving (Show, Eq)
+
+mkQual :: QualifiedTable -> Qual
+mkQual = QualTable
+
+instance ToSQL Qual where
+ toSQL (QualIden i) = toSQL i
+ toSQL (QualTable qt) = toSQL qt
+ toSQL (QualVar v) =
+ TE.encodeUtf8Builder v
+
+mkQIden :: (IsIden a, IsIden b) => a -> b -> QIden
+mkQIden q t = QIden (QualIden (toIden q)) (toIden t)
+
+data QIden
+ = QIden !Qual !Iden
+ deriving (Show, Eq)
+
+instance ToSQL QIden where
+ toSQL (QIden qual iden) =
+ mconcat [toSQL qual, BB.char7 '.', toSQL iden]
+
+data SQLExp
+ = SEPrep !Int
+ | SELit !T.Text
+ | SEUnsafe !T.Text
+ | SESelect !Select
+ | SEStar
+ | SEIden !Iden
+ | SEQIden !QIden
+ | SEFnApp !T.Text ![SQLExp] !(Maybe OrderByExp)
+ | SEOpApp !T.Text ![SQLExp]
+ | SETyAnn !SQLExp !T.Text
+ | SECond !BoolExp !SQLExp !SQLExp
+ | SEBool !BoolExp
+ | SEExcluded !T.Text
+ deriving (Show, Eq)
+
+newtype Alias
+ = Alias { getAlias :: Iden }
+ deriving (Show, Eq)
+
+instance ToSQL Alias where
+ toSQL (Alias iden) = "AS" <-> toSQL iden
+
+instance ToSQL SQLExp where
+ toSQL (SEPrep argNumber) =
+ BB.char7 '$' <> BB.intDec argNumber
+ toSQL (SELit tv) =
+ TE.encodeUtf8Builder $ pgFmtLit tv
+ toSQL (SEUnsafe t) =
+ TE.encodeUtf8Builder t
+ toSQL (SESelect se) =
+ paren $ toSQL se
+ toSQL (SEStar) =
+ BB.char7 '*'
+ toSQL (SEIden iden) =
+ toSQL iden
+ toSQL (SEQIden qIden) =
+ toSQL qIden
+ -- https://www.postgresql.org/docs/10/static/sql-expressions.html#SYNTAX-AGGREGATES
+ toSQL (SEFnApp name args mObe) =
+ TE.encodeUtf8Builder name <> paren ((", " <+> args) <-> toSQL mObe)
+ toSQL (SEOpApp op args) =
+ paren (op <+> args)
+ toSQL (SETyAnn e ty) =
+ paren (toSQL e) <> BB.string7 "::" <> TE.encodeUtf8Builder ty
+ toSQL (SECond cond te fe) =
+ BB.string7 "CASE WHEN" <-> toSQL cond <->
+ BB.string7 "THEN" <-> toSQL te <->
+ BB.string7 "ELSE" <-> toSQL fe <->
+ BB.string7 "END"
+ toSQL (SEBool be) = toSQL be
+ toSQL (SEExcluded t) = BB.string7 "EXCLUDED."
+ <> toSQL (PGCol t)
+
+data Extractor = Extractor !SQLExp !(Maybe Alias)
+ deriving (Show, Eq)
+
+getExtrAlias :: Extractor -> Maybe Alias
+getExtrAlias (Extractor _ ma) = ma
+
+mkAliasedExtr :: (IsIden a, IsIden b) => a -> (Maybe b) -> Extractor
+mkAliasedExtr t = mkAliasedExtrFromExp (mkSIdenExp t)
+
+mkAliasedExtrFromExp :: (IsIden a) => SQLExp -> (Maybe a) -> Extractor
+mkAliasedExtrFromExp sqlExp ma = Extractor sqlExp (aliasF <$> ma)
+ where
+ aliasF = Alias . toIden
+
+mkExtr :: (IsIden a) => a -> Extractor
+mkExtr t = Extractor (mkSIdenExp t) Nothing
+
+instance ToSQL Extractor where
+ toSQL (Extractor ce mal) =
+ toSQL ce <-> toSQL mal
+
+data DistinctExpr = DistinctSimple
+ | DistinctOn ![SQLExp]
+ deriving (Show, Eq)
+
+instance ToSQL DistinctExpr where
+ toSQL DistinctSimple = BB.string7 "DISTINCT"
+ toSQL (DistinctOn exps) =
+ BB.string7 "DISTINCT ON" <-> paren ("," <+> exps)
+
+data FromItem
+ = FISimple !QualifiedTable !(Maybe Alias)
+ | FIIden !Iden
+ | FISelect !Lateral !Select !Alias
+ | FIJoin !JoinExpr
+ deriving (Show, Eq)
+
+instance ToSQL FromItem where
+ toSQL (FISimple qt mal) =
+ toSQL qt <-> toSQL mal
+ toSQL (FIIden iden) =
+ toSQL iden
+ toSQL (FISelect mla sel al) =
+ toSQL mla <-> paren (toSQL sel) <-> toSQL al
+ toSQL (FIJoin je) =
+ toSQL je
+
+newtype Lateral = Lateral Bool
+ deriving (Show, Eq)
+
+instance ToSQL Lateral where
+ toSQL (Lateral True) = BB.string7 "LATERAL"
+ toSQL (Lateral False) = mempty
+
+data JoinExpr
+ = JoinExpr { tjeLeft :: !FromItem
+ , tjeType :: !JoinType
+ , tjeRight :: !FromItem
+ , tjeJC :: !JoinCond
+ } deriving (Show, Eq)
+
+instance ToSQL JoinExpr where
+ toSQL je =
+ (toSQL $ tjeLeft je)
+ <-> (toSQL $ tjeType je)
+ <-> (toSQL $ tjeRight je)
+ <-> (toSQL $ tjeJC je)
+
+data JoinType = Inner
+ | LeftOuter
+ | RightOuter
+ | FullOuter
+ deriving (Eq, Show)
+
+instance ToSQL JoinType where
+ toSQL Inner = BB.string7 "INNER JOIN"
+ toSQL LeftOuter = BB.string7 "LEFT OUTER JOIN"
+ toSQL RightOuter = BB.string7 "RIGHT OUTER JOIN"
+ toSQL FullOuter = BB.string7 "FULL OUTER JOIN"
+
+data JoinCond = JoinOn !BoolExp
+ | JoinUsing ![PGCol]
+ deriving (Show, Eq)
+
+instance ToSQL JoinCond where
+ toSQL (JoinOn be) =
+ BB.string7 "ON" <-> (paren $ toSQL be)
+ toSQL (JoinUsing cols) =
+ BB.string7 "USING" <-> paren ("," <+> cols)
+
+data BoolExp = BELit !Bool
+ | BEBin !BinOp !BoolExp !BoolExp
+ | BENot !BoolExp
+ | BECompare !CompareOp !SQLExp !SQLExp
+ | BENull !SQLExp
+ | BENotNull !SQLExp
+ | BEExists !Select
+ deriving (Show, Eq)
+
+mkExists :: QualifiedTable -> BoolExp -> BoolExp
+mkExists qt whereFrag =
+ BEExists mkSelect {
+ selExtr = [Extractor (SEUnsafe "1") Nothing],
+ selFrom = Just $ mkSimpleFromExp qt,
+ selWhere = Just $ WhereFrag whereFrag
+ }
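+
+-- Renders (modulo extra whitespace from empty clauses) as:
+-- EXISTS (SELECT 1 FROM <table> WHERE (<cond>))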
+
+instance ToSQL BoolExp where
+ toSQL (BELit True) = TE.encodeUtf8Builder $ T.squote "true"
+ toSQL (BELit False) = TE.encodeUtf8Builder $ T.squote "false"
+ toSQL (BEBin bo bel ber) =
+ (paren $ toSQL bel) <-> (toSQL bo) <-> (paren $ toSQL ber)
+ toSQL (BENot be) =
+ BB.string7 "NOT" <-> (paren $ toSQL be)
+ toSQL (BECompare co vl vr) =
+ (paren $ toSQL vl) <-> (toSQL co) <-> (paren $ toSQL vr)
+ toSQL (BENull v) =
+ (paren $ toSQL v) <-> BB.string7 "IS NULL"
+ toSQL (BENotNull v) =
+ (paren $ toSQL v) <-> BB.string7 "IS NOT NULL"
+ toSQL (BEExists sel) =
+ BB.string7 "EXISTS " <-> (paren $ toSQL sel)
+
+data BinOp = AndOp
+ | OrOp
+ deriving (Show, Eq)
+
+instance ToSQL BinOp where
+ toSQL AndOp = BB.string7 "AND"
+ toSQL OrOp = BB.string7 "OR"
+
+data CompareOp = SEQ
+ | SGT
+ | SLT
+ | SIN
+ | SNE
+ | SLIKE
+ | SNLIKE
+ | SILIKE
+ | SNILIKE
+ | SSIMILAR
+ | SNSIMILAR
+ | SGTE
+ | SLTE
+ | SNIN
+ deriving (Eq)
+
+instance Show CompareOp where
+ show SEQ = "="
+ show SGT = ">"
+ show SLT = "<"
+ show SIN = "IN"
+ show SNE = "<>"
+ show SGTE = ">="
+ show SLTE = "<="
+ show SNIN = "NOT IN"
+ show SLIKE = "LIKE"
+ show SNLIKE = "NOT LIKE"
+ show SILIKE = "ILIKE"
+ show SNILIKE = "NOT ILIKE"
+ show SSIMILAR = "SIMILAR TO"
+ show SNSIMILAR = "NOT SIMILAR TO"
+
+instance ToSQL CompareOp where
+ toSQL = BB.string7 . show
+
+buildInsVal :: PGCol -> Int -> (PGCol, SQLExp)
+buildInsVal colName argNumber =
+ (colName, SEPrep argNumber)
+
+data SQLDelete
+ = SQLDelete
+ { delTable :: !QualifiedTable
+ , delUsing :: !(Maybe UsingExp)
+ , delWhere :: !(Maybe WhereFrag)
+ , delRet :: !(Maybe RetExp)
+ } deriving (Show, Eq)
+
+data SQLUpdate
+ = SQLUpdate
+ { upTable :: !QualifiedTable
+ , upSet :: !SetExp
+ , upFrom :: !(Maybe FromExp)
+ , upWhere :: !(Maybe WhereFrag)
+ , upRet :: !(Maybe RetExp)
+ } deriving (Show, Eq)
+
+newtype SetExp = SetExp [SetExpItem]
+ deriving (Show, Eq)
+
+newtype SetExpItem = SetExpItem (PGCol, SQLExp)
+ deriving (Show, Eq)
+
+buildSEI :: PGCol -> Int -> SetExpItem
+buildSEI colName argNumber =
+ SetExpItem (colName, SEPrep argNumber)
+
+newtype UsingExp = UsingExp [TableName]
+ deriving (Show, Eq)
+
+instance ToSQL UsingExp where
+ toSQL (UsingExp tables)
+ = BB.string7 "USING" <-> "," <+> tables
+
+newtype RetExp = RetExp [Extractor]
+ deriving (Show, Eq)
+
+returningStar :: RetExp
+returningStar = RetExp [Extractor SEStar Nothing]
+
+instance ToSQL RetExp where
+ toSQL (RetExp [])
+ = mempty
+ toSQL (RetExp exps)
+ = BB.string7 "RETURNING" <-> (", " <+> exps)
+
+instance ToSQL SQLDelete where
+ toSQL sd = BB.string7 "DELETE FROM"
+ <-> (toSQL $ delTable sd)
+ <-> (toSQL $ delUsing sd)
+ <-> (toSQL $ delWhere sd)
+ <-> (toSQL $ delRet sd)
+
+instance ToSQL SQLUpdate where
+ toSQL a = BB.string7 "UPDATE"
+ <-> (toSQL $ upTable a)
+ <-> (toSQL $ upSet a)
+ <-> (toSQL $ upFrom a)
+ <-> (toSQL $ upWhere a)
+ <-> (toSQL $ upRet a)
+
+instance ToSQL SetExp where
+ toSQL (SetExp cvs) =
+ BB.string7 "SET" <-> ("," <+> cvs)
+
+instance ToSQL SetExpItem where
+ toSQL (SetExpItem (col, val)) =
+ toSQL col <-> "=" <-> toSQL val
+
+data SQLConflictTarget
+ = SQLColumn ![PGCol]
+ | SQLConstraint !ConstraintName
+ deriving (Show, Eq)
+
+instance ToSQL SQLConflictTarget where
+ toSQL (SQLColumn cols) = BB.string7 "("
+ <-> ("," <+> cols)
+ <-> BB.string7 ")"
+
+ toSQL (SQLConstraint cons) = BB.string7 "ON CONSTRAINT" <-> toSQL cons
+
+data SQLConflict
+ = DoNothing !(Maybe SQLConflictTarget)
+ | Update !SQLConflictTarget !SetExp
+ deriving (Show, Eq)
+
+instance ToSQL SQLConflict where
+ toSQL (DoNothing Nothing) = BB.string7 "ON CONFLICT DO NOTHING"
+ toSQL (DoNothing (Just ct)) = BB.string7 "ON CONFLICT"
+ <-> toSQL ct
+ <-> BB.string7 "DO NOTHING"
+ toSQL (Update ct ex) = BB.string7 "ON CONFLICT"
+ <-> toSQL ct <-> "DO UPDATE"
+ <-> toSQL ex
+
+data SQLInsert = SQLInsert
+ { siTable :: !QualifiedTable
+ , siCols :: ![PGCol]
+ , siTuples :: ![[SQLExp]]
+ , siConflict :: !(Maybe SQLConflict)
+ , siRet :: !(Maybe RetExp)
+ } deriving (Show, Eq)
+
+instance ToSQL SQLInsert where
+ toSQL si =
+ let insTuples = flip map (siTuples si) $ \tupVals ->
+ BB.string7 "(" <-> (", " <+> tupVals) <-> BB.string7 ")"
+ insConflict = maybe (BB.string7 "") toSQL
+ in "INSERT INTO"
+ <-> (toSQL $ siTable si)
+ <-> BB.string7 "("
+ <-> (", " <+> siCols si)
+ <-> BB.string7 ") VALUES"
+ <-> (", " <+> insTuples)
+ <-> (insConflict $ siConflict si)
+ <-> (toSQL $ siRet si)
+
+data CTE
+ = CTESelect !Select
+ | CTEInsert !SQLInsert
+ | CTEUpdate !SQLUpdate
+ | CTEDelete !SQLDelete
+ deriving (Show, Eq)
+
+instance ToSQL CTE where
+ toSQL = \case
+ CTESelect q -> toSQL q
+ CTEInsert q -> toSQL q
+ CTEUpdate q -> toSQL q
+ CTEDelete q -> toSQL q
+
+data SelectWith
+ = SelectWith
+ { swCTEs :: [(Alias, CTE)]
+ , swSelect :: !Select
+ } deriving (Show, Eq)
+
+instance ToSQL SelectWith where
+ toSQL (SelectWith ctes sel) =
+ "WITH " <> (", " <+> map f ctes) <-> toSQL sel
+ where
+ f (Alias al, q) = toSQL al <-> "AS" <-> paren (toSQL q)
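+
+-- Renders (illustrative): WITH <alias> AS (<cte>), ... <select>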
diff --git a/server/src-lib/Hasura/SQL/GeoJSON.hs b/server/src-lib/Hasura/SQL/GeoJSON.hs
new file mode 100644
index 00000000..a774ebe1
--- /dev/null
+++ b/server/src-lib/Hasura/SQL/GeoJSON.hs
@@ -0,0 +1,160 @@
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.SQL.GeoJSON
+ ( Point(..)
+ , MultiPoint(..)
+ , LineString(..)
+ , MultiLineString(..)
+ , Polygon(..)
+ , MultiPolygon(..)
+ , GeometryCollection(..)
+ , Geometry(..)
+ ) where
+
+import qualified Data.Aeson as J
+import qualified Data.Aeson.Types as J
+import qualified Data.Text as T
+import qualified Data.Vector as V
+
+import Control.Monad
+import Data.Maybe (maybeToList)
+import Hasura.Prelude
+
+data Position
+ = Position !Double !Double !(Maybe Double)
+ deriving (Show, Eq)
+
+withParsedArray
+ :: (J.FromJSON a)
+ => String -> (V.Vector a -> J.Parser b) -> J.Value -> J.Parser b
+withParsedArray s fn =
+ J.withArray s (mapM J.parseJSON >=> fn)
+
+instance J.FromJSON Position where
+ parseJSON = withParsedArray "Position" $ \arr ->
+ if V.length arr < 2
+ then fail "A Position needs at least 2 elements"
+ -- here we are ignoring anything past 3 elements
+ else return $ Position
+ (arr `V.unsafeIndex` 0)
+ (arr `V.unsafeIndex` 1)
+ (arr V.!? 2)
+
+instance J.ToJSON Position where
+ toJSON (Position a b c)
+ = J.toJSON $ a:b:maybeToList c
+
+newtype Point
+ = Point { unPoint :: Position }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+newtype MultiPoint
+ = MultiPoint { unMultiPoint :: [Position] }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+data LineString
+ = LineString
+ { _lsFirst :: !Position
+ , _lsSecond :: !Position
+ , _lsRest :: ![Position]
+ } deriving (Show, Eq)
+
+instance J.ToJSON LineString where
+ toJSON (LineString a b rest)
+ = J.toJSON $ a:b:rest
+
+instance J.FromJSON LineString where
+ parseJSON = withParsedArray "LineString" $ \arr ->
+ if V.length arr < 2
+ then fail "A LineString needs at least 2 Positions"
+ -- every element after the first two is kept in _lsRest
+ else
+ let fstPos = arr `V.unsafeIndex` 0
+ sndPos = arr `V.unsafeIndex` 1
+ rest = V.toList $ V.drop 2 arr
+ in return $ LineString fstPos sndPos rest
+
+newtype MultiLineString
+ = MultiLineString { unMultiLineString :: [LineString] }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+newtype GeometryCollection
+ = GeometryCollection { unGeometryCollection :: [Geometry] }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+data LinearRing
+ = LinearRing
+ { _pFirst :: !Position
+ , _pSecond :: !Position
+ , _pThird :: !Position
+ , _pRest :: ![Position]
+ } deriving (Show, Eq)
+
+instance J.FromJSON LinearRing where
+ parseJSON = withParsedArray "LinearRing" $ \arr ->
+ if V.length arr < 4
+ then fail "A LinearRing needs at least 4 Positions"
+ -- the last Position must close the ring; it is checked and then dropped
+ else do
+ let fstPos = arr `V.unsafeIndex` 0
+ sndPos = arr `V.unsafeIndex` 1
+ thrPos = arr `V.unsafeIndex` 2
+ rest = V.drop 3 arr
+ let lastPos = V.last rest
+ unless (fstPos == lastPos) $
+ fail "the first and last locations have to be equal for a LinearRing"
+ return $ LinearRing fstPos sndPos thrPos $ V.toList $ V.init rest
+
+instance J.ToJSON LinearRing where
+ toJSON (LinearRing a b c rest)
+ = J.toJSON $ (V.fromList [a, b, c] <> V.fromList rest) `V.snoc` a
+
+newtype Polygon
+ = Polygon { unPolygon :: [LinearRing] }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+newtype MultiPolygon
+ = MultiPolygon { unMultiPolygon :: [Polygon] }
+ deriving (Show, Eq, J.ToJSON, J.FromJSON)
+
+data Geometry
+ = GPoint !Point
+ | GMultiPoint !MultiPoint
+ | GLineString !LineString
+ | GMultiLineString !MultiLineString
+ | GPolygon !Polygon
+ | GMultiPolygon !MultiPolygon
+ | GGeometryCollection !GeometryCollection
+ deriving (Show, Eq)
+
+encToCoords :: (J.ToJSON a) => T.Text -> a -> J.Value
+encToCoords ty a =
+ J.object [ "type" J..= ty, "coordinates" J..= a]
+
+instance J.ToJSON Geometry where
+ toJSON = \case
+ GPoint o -> encToCoords "Point" o
+ GMultiPoint o -> encToCoords "MultiPoint" o
+ GLineString o -> encToCoords "LineString" o
+ GMultiLineString o -> encToCoords "MultiLineString" o
+ GPolygon o -> encToCoords "Polygon" o
+ GMultiPolygon o -> encToCoords "MultiPolygon" o
+ GGeometryCollection o ->
+ J.object [ "type" J..= ("GeometryCollection"::T.Text)
+ , "geometries" J..= o
+ ]
+
+instance J.FromJSON Geometry where
+ parseJSON = J.withObject "Geometry" $ \o -> do
+ ty <- o J..: "type"
+ case ty of
+ "Point" -> GPoint <$> o J..: "coordinates"
+ "MultiPoint" -> GMultiPoint <$> o J..: "coordinates"
+ "LineString" -> GLineString <$> o J..: "coordinates"
+ "MultiLineString" -> GMultiLineString <$> o J..: "coordinates"
+ "Polygon" -> GPolygon <$> o J..: "coordinates"
+ "MultiPoylgon" -> GMultiPolygon <$> o J..: "coordinates"
+ "GeometryCollection" -> GGeometryCollection <$> o J..: "geometries"
+ _ -> fail $ "unexpected geometry type: " <> ty
diff --git a/server/src-lib/Hasura/SQL/Time.hs b/server/src-lib/Hasura/SQL/Time.hs
new file mode 100644
index 00000000..74b449e9
--- /dev/null
+++ b/server/src-lib/Hasura/SQL/Time.hs
@@ -0,0 +1,119 @@
+{-# LANGUAGE BangPatterns #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+
+-- |
+-- Module: Hasura.SQL.Time (adapted from Data.Aeson.Parser.Time)
+-- Copyright: (c) 2015-2016 Bryan O'Sullivan
+-- License: BSD3
+-- Maintainer: Bryan O'Sullivan
+-- Stability: experimental
+-- Portability: portable
+--
+-- Parsers for parsing dates and times.
+
+module Hasura.SQL.Time
+ ( ZonedTimeOfDay(..)
+ ) where
+
+import Control.Monad (void, when)
+import Data.Attoparsec.Text as A
+import Data.Bits ((.&.))
+import Data.Char (isDigit, ord)
+import Data.Fixed (Fixed (MkFixed), Pico)
+import Data.Int (Int64)
+import Data.Maybe (fromMaybe)
+import Hasura.Prelude
+
+import qualified Data.Aeson.Types as Aeson
+import qualified Data.Text as T
+import qualified Data.Time.LocalTime as Local
+
+toPico :: Integer -> Pico
+toPico = MkFixed
+
+-- | Parse a two-digit integer (e.g. day of month, hour).
+twoDigits :: Parser Int
+twoDigits = do
+ a <- digit
+ b <- digit
+ let c2d c = ord c .&. 15
+ return $! c2d a * 10 + c2d b
+
+-- | Parse a time of the form @HH:MM[:SS[.SSS]]@.
+timeOfDay :: Parser Local.TimeOfDay
+timeOfDay = do
+ h <- twoDigits
+ m <- char ':' *> twoDigits
+ s <- option 0 (char ':' *> seconds)
+ if h < 24 && m < 60 && s < 61
+ then return (Local.TimeOfDay h m s)
+ else fail "invalid time"
+
+data T = T {-# UNPACK #-} !Int {-# UNPACK #-} !Int64
+
+-- | Parse a count of seconds, with the integer part being two digits
+-- long.
+seconds :: Parser Pico
+seconds = do
+ real <- twoDigits
+ mc <- peekChar
+ case mc of
+ Just '.' -> do
+ t <- anyChar *> takeWhile1 isDigit
+ return $! parsePicos real t
+ _ -> return $! fromIntegral real
+ where
+ parsePicos a0 t = toPico (fromIntegral (t' * 10^n))
+ where T n t' = T.foldl' step (T 12 (fromIntegral a0)) t
+ step ma@(T m a) c
+ | m <= 0 = ma
+ | otherwise = T (m-1) (10 * a + fromIntegral (ord c) .&. 15)
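+
+-- parsePicos scales the fraction to 12 digits, e.g. "12.25" parses to the
+-- Pico 12.25 (stored as 12250000000000 * 10^-12); extra digits are dropped.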
+
+-- | Parse a time zone, and return 'Nothing' if the offset from UTC is
+-- zero. (This makes some speedups possible.)
+timeZone :: Parser (Maybe Local.TimeZone)
+timeZone = do
+ let maybeSkip c = do ch <- peekChar'; when (ch == c) (void anyChar)
+ maybeSkip ' '
+ ch <- satisfy $ \c -> c == 'Z' || c == '+' || c == '-'
+ if ch == 'Z'
+ then return Nothing
+ else do
+ h <- twoDigits
+ mm <- peekChar
+ m <- case mm of
+ Just ':' -> anyChar *> twoDigits
+ Just d | isDigit d -> twoDigits
+ _ -> return 0
+ let off | ch == '-' = negate off0
+ | otherwise = off0
+ off0 = h * 60 + m
+ case () of
+ _ | off == 0 ->
+ return Nothing
+ | off < -720 || off > 840 || m > 59 ->
+ fail "invalid time zone offset"
+ | otherwise ->
+ let !tz = Local.minutesToTimeZone off
+ in return (Just tz)
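+
+-- Illustrative: "Z" and "+00:00" both yield Nothing (UTC); "+05:30" yields
+-- Just (Local.minutesToTimeZone 330).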
+
+data ZonedTimeOfDay
+ = ZonedTimeOfDay
+ { ztodTime :: Local.TimeOfDay
+ , ztodZone :: Local.TimeZone
+ } deriving (Show, Eq)
+
+utc :: Local.TimeZone
+utc = Local.TimeZone 0 False ""
+
+zonedTimeOfDay :: T.Text -> Aeson.Parser ZonedTimeOfDay
+zonedTimeOfDay t =
+ case A.parseOnly (p <* endOfInput) t of
+ Left err -> fail $ "could not parse timetz: " ++ err
+ Right r -> return r
+ where
+ p = ZonedTimeOfDay <$> timeOfDay <*> (fromMaybe utc <$> timeZone)
+
+instance Aeson.FromJSON ZonedTimeOfDay where
+ parseJSON (Aeson.String t) = zonedTimeOfDay t
+ parseJSON _ = fail "Expecting a string for timetz"
diff --git a/server/src-lib/Hasura/SQL/Types.hs b/server/src-lib/Hasura/SQL/Types.hs
new file mode 100644
index 00000000..83b31d93
--- /dev/null
+++ b/server/src-lib/Hasura/SQL/Types.hs
@@ -0,0 +1,306 @@
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE GeneralizedNewtypeDeriving #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.SQL.Types where
+
+import qualified Database.PG.Query as Q
+import qualified Database.PG.Query.PTI as PTI
+
+import Hasura.Prelude
+
+import Data.Aeson
+import Data.Aeson.Encoding (text)
+import GHC.Generics
+import Instances.TH.Lift ()
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Extended as T
+import qualified Database.PostgreSQL.LibPQ as PQ
+
+class ToSQL a where
+ toSQL :: a -> BB.Builder
+
+instance ToSQL BB.Builder where
+ toSQL x = x
+
+-- instance ToSQL T.Text where
+-- toSQL x = TE.encodeUtf8Builder x
+
+infixr 6 <+>
+(<+>) :: (ToSQL a) => T.Text -> [a] -> BB.Builder
+(<+>) _ [] = mempty
+(<+>) kat (x:xs) =
+ toSQL x <> mconcat [ TE.encodeUtf8Builder kat <> toSQL x' | x' <- xs ]
+{-# INLINE (<+>) #-}
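+
+-- Illustrative: ", " <+> [PGCol "a", PGCol "b"] builds "a", "b"
+-- (identifiers are double-quoted by pgFmtIden).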
+
+newtype Iden = Iden { getIdenTxt :: T.Text }
+ deriving (Show, Eq, FromJSON, ToJSON)
+
+instance ToSQL Iden where
+ toSQL (Iden t) =
+ TE.encodeUtf8Builder $ pgFmtIden t
+
+class IsIden a where
+ toIden :: a -> Iden
+
+instance IsIden Iden where
+ toIden = id
+
+class DQuote a where
+ dquoteTxt :: a -> T.Text
+
+instance DQuote T.Text where
+ dquoteTxt = id
+
+pgFmtIden :: T.Text -> T.Text
+pgFmtIden x =
+ "\"" <> T.replace "\"" "\"\"" (trimNullChars x) <> "\""
+
+pgFmtLit :: T.Text -> T.Text
+pgFmtLit x =
+ let trimmed = trimNullChars x
+ escaped = "'" <> T.replace "'" "''" trimmed <> "'"
+ slashed = T.replace "\\" "\\\\" escaped in
+ if "\\" `T.isInfixOf` escaped
+ then "E" <> slashed
+ else slashed
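+
+-- Illustrative: pgFmtLit "it's" == "'it''s'"; a literal containing a
+-- backslash has it doubled and is emitted in E'...' form.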
+
+trimNullChars :: T.Text -> T.Text
+trimNullChars = T.takeWhile (/= '\x0')
+
+infixr 6 <>>
+(<>>) :: (DQuote a) => T.Text -> a -> T.Text
+(<>>) lTxt a =
+ lTxt <> T.dquote (dquoteTxt a)
+{-# INLINE (<>>) #-}
+
+infixr 6 <<>
+(<<>) :: (DQuote a) => a -> T.Text -> T.Text
+(<<>) a rTxt =
+ T.dquote (dquoteTxt a) <> rTxt
+{-# INLINE (<<>) #-}
+
+instance (ToSQL a) => ToSQL (Maybe a) where
+ toSQL (Just a) = toSQL a
+ toSQL Nothing = mempty
+
+newtype TableName
+ = TableName { getTableTxt :: T.Text }
+ deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift)
+
+instance IsIden TableName where
+ toIden (TableName t) = Iden t
+
+instance DQuote TableName where
+ dquoteTxt (TableName t) = t
+
+instance ToSQL TableName where
+ toSQL = toSQL . toIden
+
+newtype ConstraintName
+ = ConstraintName { getConstraintTxt :: T.Text }
+ deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift)
+
+instance IsIden ConstraintName where
+ toIden (ConstraintName t) = Iden t
+
+instance ToSQL ConstraintName where
+ toSQL = toSQL . toIden
+
+newtype SchemaName
+ = SchemaName { getSchemaTxt :: T.Text }
+ deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift)
+
+publicSchema :: SchemaName
+publicSchema = SchemaName "public"
+
+instance IsIden SchemaName where
+ toIden (SchemaName t) = Iden t
+
+instance ToSQL SchemaName where
+ toSQL = toSQL . toIden
+
+data QualifiedTable
+ = QualifiedTable
+ { qtSchema :: !SchemaName
+ , qtTable :: !TableName
+ } deriving (Show, Eq, Generic, Lift)
+
+instance FromJSON QualifiedTable where
+ parseJSON v@(String _) =
+ QualifiedTable publicSchema <$> parseJSON v
+ parseJSON (Object o) =
+ QualifiedTable <$>
+ o .:? "schema" .!= publicSchema <*>
+ o .: "name"
+ parseJSON _ =
+ fail "expecting a string/object for table"
+
+instance ToJSON QualifiedTable where
+ toJSON (QualifiedTable (SchemaName "public") tn) = toJSON tn
+ toJSON (QualifiedTable sn tn) =
+ object [ "schema" .= sn
+ , "name" .= tn
+ ]
+
+instance ToJSONKey QualifiedTable where
+ toJSONKey = ToJSONKeyText qualTableToTxt (text . qualTableToTxt)
+
+instance DQuote QualifiedTable where
+ dquoteTxt = qualTableToTxt
+
+instance Hashable QualifiedTable
+
+instance ToSQL QualifiedTable where
+ toSQL (QualifiedTable sn tn) =
+ toSQL sn <> BB.string7 "." <> toSQL tn
+
+qualTableToTxt :: QualifiedTable -> T.Text
+qualTableToTxt (QualifiedTable (SchemaName "public") tn) =
+ getTableTxt tn
+qualTableToTxt (QualifiedTable sn tn) =
+ getSchemaTxt sn <> "." <> getTableTxt tn
+
+newtype PGCol
+ = PGCol { getPGColTxt :: T.Text }
+ deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift)
+
+instance IsIden PGCol where
+ toIden (PGCol t) = Iden t
+
+instance ToSQL PGCol where
+ toSQL = toSQL . toIden
+
+instance DQuote PGCol where
+ dquoteTxt (PGCol t) = t
+
+data PGColType
+ = PGSmallInt
+ | PGInteger
+ | PGBigInt
+ | PGSerial
+ | PGBigSerial
+ | PGFloat
+ | PGDouble
+ | PGNumeric
+ | PGBoolean
+ | PGChar
+ | PGVarchar
+ | PGText
+ | PGDate
+ | PGTimeStampTZ
+ | PGTimeTZ
+ | PGJSON
+ | PGJSONB
+ | PGGeometry
+ | PGGeography
+ | PGUnknown !T.Text
+ deriving (Eq, Lift, Generic)
+
+instance Hashable PGColType
+
+instance Show PGColType where
+ show PGSmallInt = "smallint"
+ show PGInteger = "integer"
+ show PGBigInt = "bigint"
+ show PGSerial = "serial"
+ show PGBigSerial = "bigserial"
+ show PGFloat = "real"
+ show PGDouble = "float8"
+ show PGNumeric = "numeric"
+ show PGBoolean = "boolean"
+ show PGChar = "character"
+ show PGVarchar = "varchar"
+ show PGText = "text"
+ show PGDate = "date"
+ show PGTimeStampTZ = "timestamptz"
+ show PGTimeTZ = "timetz"
+ show PGJSON = "json"
+ show PGJSONB = "jsonb"
+ show PGGeometry = "geometry"
+ show PGGeography = "geography"
+ show (PGUnknown t) = T.unpack t
+
+instance ToJSON PGColType where
+ toJSON pct = String $ T.pack $ show pct
+
+instance ToSQL PGColType where
+ toSQL pct = BB.string7 $ show pct
+
+instance FromJSON PGColType where
+ parseJSON (String "serial") = return PGSerial
+ parseJSON (String "bigserial") = return PGBigSerial
+
+ parseJSON (String "smallint") = return PGSmallInt
+ parseJSON (String "int2") = return PGSmallInt
+
+ parseJSON (String "integer") = return PGInteger
+ parseJSON (String "int4") = return PGInteger
+
+ parseJSON (String "bigint") = return PGBigInt
+ parseJSON (String "int8") = return PGBigInt
+
+ parseJSON (String "real") = return PGFloat
+ parseJSON (String "float4") = return PGFloat
+
+ parseJSON (String "double precision") = return PGDouble
+ parseJSON (String "float8") = return PGDouble
+
+ parseJSON (String "numeric") = return PGNumeric
+ parseJSON (String "decimal") = return PGNumeric
+
+ parseJSON (String "boolean") = return PGBoolean
+ parseJSON (String "bool") = return PGBoolean
+
+ parseJSON (String "character") = return PGChar
+
+ parseJSON (String "varchar") = return PGVarchar
+ parseJSON (String "character varying") = return PGVarchar
+
+ parseJSON (String "text") = return PGText
+ parseJSON (String "citext") = return PGText
+
+ parseJSON (String "date") = return PGDate
+
+ parseJSON (String "timestamptz") = return PGTimeStampTZ
+ parseJSON (String "timestamp with time zone") = return PGTimeStampTZ
+
+ parseJSON (String "timetz") = return PGTimeTZ
+ parseJSON (String "time with time zone") = return PGTimeTZ
+
+ parseJSON (String "json") = return PGJSON
+ parseJSON (String "jsonb") = return PGJSONB
+
+ parseJSON (String "geometry") = return PGGeometry
+ parseJSON (String "geography") = return PGGeography
+
+ parseJSON (String t) = return $ PGUnknown t
+ parseJSON _ =
+ fail "Expecting a string for PGColType"
+
+pgTypeOid :: PGColType -> PQ.Oid
+pgTypeOid PGSmallInt = PTI.int2
+pgTypeOid PGInteger = PTI.int4
+pgTypeOid PGBigInt = PTI.int8
+pgTypeOid PGSerial = PTI.int4
+pgTypeOid PGBigSerial = PTI.int8
+pgTypeOid PGFloat = PTI.float4
+pgTypeOid PGDouble = PTI.float8
+pgTypeOid PGNumeric = PTI.numeric
+pgTypeOid PGBoolean = PTI.bool
+pgTypeOid PGChar = PTI.char
+pgTypeOid PGVarchar = PTI.varchar
+pgTypeOid PGText = PTI.text
+pgTypeOid PGDate = PTI.date
+pgTypeOid PGTimeStampTZ = PTI.timestamptz
+pgTypeOid PGTimeTZ = PTI.timetz
+pgTypeOid PGJSON = PTI.json
+pgTypeOid PGJSONB = PTI.jsonb
+-- we are using the ST_GeomFromGeoJSON($i) instead of $i
+pgTypeOid PGGeometry = PTI.text
+pgTypeOid PGGeography = PTI.text
+pgTypeOid (PGUnknown _) = PTI.auto
diff --git a/server/src-lib/Hasura/SQL/Value.hs b/server/src-lib/Hasura/SQL/Value.hs
new file mode 100644
index 00000000..8e0227bd
--- /dev/null
+++ b/server/src-lib/Hasura/SQL/Value.hs
@@ -0,0 +1,197 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.SQL.Value where
+
+import Hasura.SQL.GeoJSON
+import Hasura.SQL.Time
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+import qualified Database.PG.Query.PTI as PTI
+import qualified Hasura.SQL.DML as S
+
+import Hasura.Prelude
+import Data.Aeson
+import Data.Aeson.Internal
+import Data.Int
+import Data.Scientific
+import Data.Time
+
+import qualified Data.Aeson.Text as AE
+import qualified Data.Aeson.Types as AT
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Lazy as TL
+
+import qualified Database.PostgreSQL.LibPQ as PQ
+import qualified PostgreSQL.Binary.Encoding as PE
+
+-- Binary value, used in prepared SQL statements.
+data PGColValue
+ = PGValInteger !Int32
+ | PGValSmallInt !Int16
+ | PGValBigInt !Int64
+ | PGValFloat !Float
+ | PGValDouble !Double
+ | PGValNumeric !Scientific
+ | PGValBoolean !Bool
+ | PGValChar !Char
+ | PGValVarchar !T.Text
+ | PGValText !T.Text
+ | PGValDate !Day
+ | PGValTimeStampTZ !UTCTime
+ | PGValTimeTZ !ZonedTimeOfDay
+ | PGNull !PGColType
+ | PGValJSON !Q.JSON
+ | PGValJSONB !Q.JSONB
+ | PGValGeo !Geometry
+ | PGValUnknown !T.Text
+ deriving (Show, Eq)
+
+txtEncoder :: PGColValue -> S.SQLExp
+txtEncoder colVal = case colVal of
+ PGValInteger i -> S.SELit $ T.pack $ show i
+ PGValSmallInt i -> S.SELit $ T.pack $ show i
+ PGValBigInt i -> S.SELit $ T.pack $ show i
+ PGValFloat f -> S.SELit $ T.pack $ show f
+ PGValDouble d -> S.SELit $ T.pack $ show d
+ PGValNumeric sc -> S.SELit $ T.pack $ show sc
+ PGValBoolean b -> S.SELit $ bool "false" "true" b
+ PGValChar t -> S.SELit $ T.pack $ show t
+ PGValVarchar t -> S.SELit t
+ PGValText t -> S.SELit t
+ PGValDate d -> S.SELit $ T.pack $ showGregorian d
+ PGValTimeStampTZ u ->
+ S.SELit $ T.pack $ formatTime defaultTimeLocale "%FT%T%QZ" u
+ PGValTimeTZ (ZonedTimeOfDay tod tz) ->
+ S.SELit $ T.pack (show tod ++ timeZoneOffsetString tz)
+ PGNull _ ->
+ S.SEUnsafe "NULL"
+ PGValJSON (Q.JSON j) -> S.SELit $ TL.toStrict $
+ AE.encodeToLazyText j
+ PGValJSONB (Q.JSONB j) -> S.SELit $ TL.toStrict $
+ AE.encodeToLazyText j
+ PGValGeo o -> S.SELit $ TL.toStrict $
+ AE.encodeToLazyText o
+ PGValUnknown t -> S.SELit t
+
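+-- | Encode a value as a binary prepared-statement argument: the
+-- parameter's type OID plus its binary payload. Geometry/geography
+-- values are sent as GeoJSON text because the generated SQL wraps the
+-- parameter in ST_GeomFromGeoJSON (see 'toPrepParam').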
+binEncoder :: PGColValue -> Q.PrepArg
+binEncoder colVal = case colVal of
+ PGValInteger i ->
+ Q.toPrepVal i
+ PGValSmallInt i ->
+ Q.toPrepVal i
+ PGValBigInt i ->
+ Q.toPrepVal i
+ PGValFloat f ->
+ Q.toPrepVal f
+ PGValDouble d ->
+ Q.toPrepVal d
+ PGValNumeric sc ->
+ Q.toPrepVal sc
+ PGValBoolean b ->
+ Q.toPrepVal b
+ PGValChar t ->
+ Q.toPrepVal t
+ PGValVarchar t ->
+ Q.toPrepVal t
+ PGValText t ->
+ Q.toPrepVal t
+ PGValDate d ->
+ Q.toPrepVal d
+ PGValTimeStampTZ u ->
+ Q.toPrepVal u
+ PGValTimeTZ (ZonedTimeOfDay t z) ->
+ Q.toPrepValHelper PTI.timetz PE.timetz_int (t, z)
+ PGNull ty ->
+ (pgTypeOid ty, Nothing)
+ PGValJSON u ->
+ Q.toPrepVal u
+ PGValJSONB u ->
+ Q.toPrepVal u
+ PGValGeo o ->
+ Q.toPrepVal $ TL.toStrict $ AE.encodeToLazyText o
+ PGValUnknown t ->
+ (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text))
+
+parsePGValue' :: PGColType
+ -> Value
+ -> AT.Parser PGColValue
+parsePGValue' ty Null =
+ return $ PGNull ty
+parsePGValue' PGSmallInt val =
+ PGValSmallInt <$> parseJSON val
+parsePGValue' PGInteger val =
+ PGValInteger <$> parseJSON val
+parsePGValue' PGBigInt val =
+ PGValBigInt <$> parseJSON val
+parsePGValue' PGSerial val =
+ PGValInteger <$> parseJSON val
+parsePGValue' PGBigSerial val =
+ PGValBigInt <$> parseJSON val
+parsePGValue' PGFloat val =
+ PGValFloat <$> parseJSON val
+parsePGValue' PGDouble val =
+ PGValDouble <$> parseJSON val
+parsePGValue' PGNumeric val =
+ PGValNumeric <$> parseJSON val
+parsePGValue' PGBoolean val =
+ PGValBoolean <$> parseJSON val
+parsePGValue' PGChar val =
+ PGValChar <$> parseJSON val
+parsePGValue' PGVarchar val =
+ PGValVarchar <$> parseJSON val
+parsePGValue' PGText val =
+ PGValText <$> parseJSON val
+parsePGValue' PGDate val =
+ PGValDate <$> parseJSON val
+parsePGValue' PGTimeStampTZ val =
+ PGValTimeStampTZ <$> parseJSON val
+parsePGValue' PGTimeTZ val =
+ PGValTimeTZ <$> parseJSON val
+parsePGValue' PGJSON val =
+ PGValJSON . Q.JSON <$> parseJSON val
+parsePGValue' PGJSONB val =
+ PGValJSONB . Q.JSONB <$> parseJSON val
+parsePGValue' PGGeometry val =
+ PGValGeo <$> parseJSON val
+parsePGValue' PGGeography val =
+ PGValGeo <$> parseJSON val
+parsePGValue' (PGUnknown _) (String t) =
+ return $ PGValUnknown t
+parsePGValue' (PGUnknown tyName) _ =
+ fail $ "A string is expected for type : " ++ T.unpack tyName
+
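+-- | Like 'parsePGValue'', but a string that fails to parse at the
+-- expected type is passed through verbatim as 'PGValUnknown', leaving
+-- the final coercion to Postgres.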
+parsePGValue :: PGColType -> Value -> AT.Parser PGColValue
+parsePGValue pct val =
+ case val of
+ String t -> parsePGValue' pct val <|> return (PGValUnknown t)
+ _ -> parsePGValue' pct val
+
+convToBin :: PGColType
+ -> Value
+ -> AT.Parser Q.PrepArg
+convToBin ty val =
+ binEncoder <$> parsePGValue ty val
+
+convToTxt :: PGColType
+ -> Value
+ -> AT.Parser S.SQLExp
+convToTxt ty val =
+ txtEncoder <$> parsePGValue ty val
+
+readEitherTxt :: (Read a) => T.Text -> Either String a
+readEitherTxt = readEither . T.unpack
+
+iresToEither :: IResult a -> Either String a
+iresToEither (IError _ msg) = Left msg
+iresToEither (ISuccess a) = return a
+
+pgValFromJVal :: (FromJSON a) => Value -> Either String a
+pgValFromJVal = iresToEither . ifromJSON
+
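+-- | Reference the i-th prepared-statement parameter; geospatial
+-- parameters arrive as GeoJSON text and are converted with
+-- ST_GeomFromGeoJSON.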
+toPrepParam :: Int -> PGColType -> S.SQLExp
+toPrepParam i pct =
+ if pct == PGGeometry || pct == PGGeography
+ then S.SEFnApp "ST_GeomFromGeoJSON" [S.SEPrep i] Nothing
+ else S.SEPrep i
diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs
new file mode 100644
index 00000000..46f49ecf
--- /dev/null
+++ b/server/src-lib/Hasura/Server/App.hs
@@ -0,0 +1,430 @@
+{-# LANGUAGE DataKinds #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.Server.App where
+
+import Control.Concurrent.MVar
+import Control.Lens
+import Data.Char (isSpace)
+import Data.IORef
+
+import Crypto.Hash (Digest, SHA1, hash)
+import Data.Aeson hiding (json)
+import qualified Data.ByteString.Lazy as BL
+import Data.CaseInsensitive (CI (..), original)
+import qualified Data.FileEmbed as FE
+import qualified Data.HashMap.Strict as M
+import qualified Data.String.Conversions as CS
+import qualified Data.Text as T
+import qualified Data.Text.Lazy as TL
+import qualified Data.Text.Lazy.Encoding as TLE
+import Data.Time.Clock (getCurrentTime)
+import qualified Network.Connection as NC
+import qualified Network.HTTP.Client as H
+import qualified Network.HTTP.Client.TLS as HT
+import Network.Wai (strictRequestBody)
+import qualified Network.Wreq as Wq
+import qualified Network.Wreq.Types as WqT
+
+import Web.Spock.Core
+
+import qualified Network.HTTP.Types as N
+import qualified Network.Wai.Internal as WI
+import qualified Network.Wai.Middleware.Static as MS
+
+import qualified Data.Text.Encoding.Error as TE
+import qualified Database.PG.Query as Q
+import qualified Hasura.GraphQL.Execute as GE
+import qualified Hasura.GraphQL.Execute.Result as GE
+import qualified Hasura.GraphQL.Schema as GS
+
+import Hasura.RQL.DDL.Schema.Table
+import Hasura.RQL.DML.Explain
+import Hasura.RQL.DML.QueryTemplate
+import Hasura.RQL.Types
+import Hasura.Server.Init
+import Hasura.Server.Logging
+import Hasura.Prelude hiding (get, put)
+import Hasura.Server.Middleware (corsMiddleware,
+ mkDefaultCorsPolicy)
+import Hasura.Server.Query
+import Hasura.Server.Utils
+import Hasura.SQL.Types
+
+landingPage :: String
+landingPage = $(FE.embedStringFile "src-rsr/landing_page.html")
+
+type RavenLogger = ServerLogger (BL.ByteString, Either QErr BL.ByteString)
+
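+-- | Compute (status, log detail, query hash, response size) for the
+-- request logger: the hash is the SHA1 of the raw request body, and
+-- the detail (request plus error) is emitted only for failed requests.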
+ravenLogGen :: LogDetailG (BL.ByteString, Either QErr BL.ByteString)
+ravenLogGen _ (reqBody, res) =
+ (status, toJSON <$> logDetail, Just qh, Just size)
+ where
+ status = either qeStatus (const N.status200) res
+ logDetail = either (Just . qErrToLogDetail) (const Nothing) res
+ reqBodyTxt = TL.filter (not . isSpace) $ decodeLBS reqBody
+ qErrToLogDetail qErr =
+ LogDetail reqBodyTxt $ toJSON qErr
+ size = BL.length $ either encode id res
+ qh = T.pack . show $ sha1 reqBody
+ sha1 :: BL.ByteString -> Digest SHA1
+ sha1 = hash . BL.toStrict
+
+decodeLBS :: BL.ByteString -> TL.Text
+decodeLBS = TLE.decodeUtf8With TE.lenientDecode
+
+data AuthMode
+ = AMNoAuth
+ | AMAccessKey !T.Text
+ | AMAccessKeyAndHook !T.Text !T.Text
+ deriving (Show, Eq)
+
+data ServerCtx
+ = ServerCtx
+ { scIsolation :: Q.TxIsolation
+ , scPGPool :: Q.PGPool
+ , scLogger :: RavenLogger
+ , scCacheRef :: IORef (SchemaCache, GS.GCtxMap)
+ , scCacheLock :: MVar ()
+ , scServerMode :: AuthMode
+ }
+
+data HandlerCtx
+ = HandlerCtx
+ { hcServerCtx :: ServerCtx
+ , hcReqBody :: BL.ByteString
+ , hcHeaders :: [(T.Text, T.Text)]
+ }
+
+type Handler = ExceptT QErr (ReaderT HandlerCtx IO)
+
+{-# SCC parseBody #-}
+parseBody :: (FromJSON a) => Handler a
+parseBody = do
+ reqBody <- hcReqBody <$> ask
+ case decode' reqBody of
+ Just jVal -> decodeValue jVal
+ Nothing -> throw400 InvalidJSON "invalid json"
+
+filterHeaders :: [(T.Text, T.Text)] -> [(T.Text, T.Text)]
+filterHeaders hdrs = flip filter hdrs $ \(h, _) ->
+ isXHasuraTxt h && (T.toLower h /= userRoleHeader)
+ && (T.toLower h /= accessKeyHeader)
+
+parseUserInfo :: Handler UserInfo
+parseUserInfo = do
+ headers <- hcHeaders <$> ask
+ let mUserRoleTuple = flip find headers $ \hdr ->
+ userRoleHeader == T.toLower (fst hdr)
+ mUserRoleV = snd <$> mUserRoleTuple
+ userRoleV = fromMaybe "admin" mUserRoleV
+ return $ UserInfo (RoleName userRoleV) $ filterHeaders headers
+
+onlyAdmin :: Handler ()
+onlyAdmin = do
+ (UserInfo uRole _) <- parseUserInfo
+ when (uRole /= adminRole) $
+ throw400 AccessDenied "You have to be an admin to access this endpoint"
+
+buildQCtx :: Handler QCtx
+buildQCtx = do
+ scRef <- scCacheRef . hcServerCtx <$> ask
+ userInfo <- parseUserInfo
+ cache <- liftIO $ readIORef scRef
+ return $ QCtx userInfo $ fst cache
+
+jsonHeader :: (T.Text, T.Text)
+jsonHeader = ("Content-Type", "application/json; charset=utf-8")
+
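+-- | Resolve session headers through the authentication webhook: the
+-- client's headers (minus Content-Length, User-Agent, Host, Origin and
+-- Referer) are forwarded, and a 200 response must carry a JSON object
+-- of header/value pairs; a 401 or any other response fails the request.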
+fromWebHook
+ :: (MonadIO m)
+ => T.Text
+ -> [N.Header]
+ -> ActionT m [(T.Text, T.Text)]
+fromWebHook urlT reqHeaders = do
+ manager <- liftIO $
+ H.newManager $ HT.mkManagerSettings tlsSimple Nothing
+ let options = Wq.defaults
+ { WqT.headers = filteredHeaders
+ , WqT.checkResponse = Just (\_ _ -> return ())
+ , WqT.manager = Right manager
+ }
+ resp <- liftIO $ Wq.getWith options $ T.unpack urlT
+ let status = resp ^. Wq.responseStatus
+ validateStatus status
+ webHookResp <- decodeBS $ resp ^. Wq.responseBody
+ return $ M.toList webHookResp
+ where
+ filteredHeaders = flip filter reqHeaders $ \(n, _) ->
+ n /= "Content-Length" && n /= "User-Agent" && n /= "Host"
+ && n /= "Origin" && n /= "Referer"
+ tlsSimple = NC.TLSSettingsSimple True False False
+
+ validateStatus statusCode
+ | statusCode == N.status200 = return ()
+ | statusCode == N.status401 = raiseAPIException N.status401 $
+ err401 AccessDenied
+ "Authentication hook unauthorized this request"
+ | otherwise = raiseAPIException N.status500 $
+ err500 Unexpected
+ "Invalid response from authorization hook"
+
+ decodeBS bs = case eitherDecode bs of
+ Left e -> raiseAPIException N.status500 $ err500 Unexpected $
+ "Invalid response from authorization hook; " <> T.pack e
+ Right a -> return a
+
+ raiseAPIException st qErr = do
+ setStatus st
+ uncurry setHeader jsonHeader
+ lazyBytes $ encode qErr
+
+fetchHeaders
+ :: (MonadIO m)
+ => WI.Request
+ -> AuthMode
+ -> ActionT m [(T.Text, T.Text)]
+fetchHeaders req authMode =
+ case authMode of
+ AMNoAuth -> return headers
+
+ AMAccessKey accKey -> do
+ mReqAccessKey <- header accessKeyHeader
+ reqAccessKey <- maybe accessKeyAuthErr return mReqAccessKey
+ validateKeyAndReturnHeaders accKey reqAccessKey
+
+ AMAccessKeyAndHook accKey hook -> do
+ mReqAccessKey <- header accessKeyHeader
+ maybe (fromWebHook hook rawHeaders)
+ (validateKeyAndReturnHeaders accKey)
+ mReqAccessKey
+ where
+ rawHeaders = WI.requestHeaders req
+ headers = headersTxt rawHeaders
+
+ validateKeyAndReturnHeaders key reqKey = do
+ when (reqKey /= key) accessKeyAuthErr
+ return headers
+
+ accessKeyAuthErr = do
+ setStatus N.status401
+ uncurry setHeader jsonHeader
+ lazyBytes $ encode accessKeyErrMsg
+
+ accessKeyErrMsg :: M.HashMap T.Text T.Text
+ accessKeyErrMsg = M.fromList
+ [ ("message", "access keys don't match or not found")
+ , ("code", "access-key-error")
+ ]
+
+ headersTxt hdrsRaw =
+ flip map hdrsRaw $ \(hdrName, hdrVal) ->
+ (CS.cs $ original hdrName, CS.cs hdrVal)
+
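+-- | Wrap a 'Handler' as a Spock action: read the request body, resolve
+-- headers per the auth mode, run the handler while timing it, log the
+-- request, and encode either the error or the response.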
+mkSpockAction
+ :: (MonadIO m)
+ => (T.Text -> QErr -> Value)
+ -> ServerCtx
+ -> Handler BL.ByteString
+ -> ActionT m ()
+mkSpockAction qErrEncoder serverCtx handler = do
+ req <- request
+ reqBody <- liftIO $ strictRequestBody req
+
+ headers <- fetchHeaders req $ scServerMode serverCtx
+
+ role <- fromMaybe "admin" <$> header userRoleHeader
+ let handlerState = HandlerCtx serverCtx reqBody headers
+
+ t1 <- liftIO getCurrentTime -- for measuring response time purposes
+ result <- liftIO $ runReaderT (runExceptT handler) handlerState
+ t2 <- liftIO getCurrentTime -- for measuring response time purposes
+
+ liftIO $ logger req (reqBody, result) $ Just (t1, t2)
+ either (qErrToResp role) resToResp result
+ where
+ logger = scLogger serverCtx
+
+ -- encode error response
+ qErrToResp mRole qErr = do
+ setStatus $ qeStatus qErr
+ json $ qErrEncoder mRole qErr
+
+ resToResp resp = do
+ uncurry setHeader jsonHeader
+ lazyBytes resp
+
+
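+-- | Run an action while holding the given lock, releasing it on both
+-- success and error; used to serialise queries that modify the schema
+-- cache.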
+withLock :: (MonadIO m, MonadError e m)
+ => MVar () -> m a -> m a
+withLock lk action = do
+ acquireLock
+ res <- action `catchError` onError
+ releaseLock
+ return res
+ where
+ onError e = releaseLock >> throwError e
+ acquireLock = liftIO $ takeMVar lk
+ releaseLock = liftIO $ putMVar lk ()
+
+v1ExplainHandler :: RQLExplain -> Handler BL.ByteString
+v1ExplainHandler expQuery = dbAction
+ where
+ dbAction = do
+ onlyAdmin
+ scRef <- scCacheRef . hcServerCtx <$> ask
+ schemaCache <- liftIO $ readIORef scRef
+ pool <- scPGPool . hcServerCtx <$> ask
+ isoL <- scIsolation . hcServerCtx <$> ask
+ runExplainQuery pool isoL userInfo (fst schemaCache) selectQ
+
+ selectQ = rqleQuery expQuery
+ role = rqleRole expQuery
+ headers = M.toList $ rqleHeaders expQuery
+ userInfo = UserInfo role headers
+
+v1QueryHandler :: RQLQuery -> Handler BL.ByteString
+v1QueryHandler query = do
+ lk <- scCacheLock . hcServerCtx <$> ask
+ bool (fst <$> dbAction) (withLock lk dbActionReload) $
+ queryNeedsReload query
+ where
+ -- Hit postgres
+ dbAction = do
+ userInfo <- parseUserInfo
+ scRef <- scCacheRef . hcServerCtx <$> ask
+ schemaCache <- liftIO $ readIORef scRef
+ pool <- scPGPool . hcServerCtx <$> ask
+ isoL <- scIsolation . hcServerCtx <$> ask
+ runQuery pool isoL userInfo (fst schemaCache) query
+
+ -- Also update the schema cache
+ dbActionReload = do
+ (resp, newSc) <- dbAction
+ newGCtxMap <- GS.mkGCtxMap $ scTables newSc
+ scRef <- scCacheRef . hcServerCtx <$> ask
+ liftIO $ writeIORef scRef (newSc, newGCtxMap)
+ return resp
+
+v1Alpha1GQHandler :: GE.GraphQLRequest -> Handler BL.ByteString
+v1Alpha1GQHandler query = do
+ userInfo <- parseUserInfo
+ scRef <- scCacheRef . hcServerCtx <$> ask
+ cache <- liftIO $ readIORef scRef
+ pool <- scPGPool . hcServerCtx <$> ask
+ isoL <- scIsolation . hcServerCtx <$> ask
+ GE.runGQ pool isoL userInfo (snd cache) query
+
+-- v1Alpha1GQSchemaHandler :: Handler BL.ByteString
+-- v1Alpha1GQSchemaHandler = do
+-- scRef <- scCacheRef . hcServerCtx <$> ask
+-- schemaCache <- liftIO $ readIORef scRef
+-- onlyAdmin
+-- GS.generateGSchemaH schemaCache
+
+newtype QueryParser
+ = QueryParser { getQueryParser :: QualifiedTable -> Handler RQLQuery }
+
+queryParsers :: M.HashMap T.Text QueryParser
+queryParsers =
+ M.fromList
+ [ ("select", mkQueryParser RQSelect)
+ , ("insert", mkQueryParser RQInsert)
+ , ("update", mkQueryParser RQUpdate)
+ , ("delete", mkQueryParser RQDelete)
+ , ("count", mkQueryParser RQCount)
+ ]
+ where
+ mkQueryParser f =
+ QueryParser $ \qt -> do
+ obj <- parseBody
+ let val = Object $ M.insert "table" (toJSON qt) obj
+ q <- decodeValue val
+ return $ f q
+
+legacyQueryHandler :: TableName -> T.Text -> Handler BL.ByteString
+legacyQueryHandler tn queryType =
+ case M.lookup queryType queryParsers of
+ Just queryParser -> getQueryParser queryParser qt >>= v1QueryHandler
+ Nothing -> throw404 NotFound "No such resource exists"
+ where
+ qt = QualifiedTable publicSchema tn
+
+app
+ :: Q.TxIsolation
+ -> Maybe String
+ -> RavenLogger
+ -> Q.PGPool
+ -> AuthMode
+ -> CorsConfig
+ -> SpockT IO ()
+app isoLevel mRootDir logger pool mode corsCfg = do
+ cacheRef <- lift $ do
+ pgResp <- liftIO $ runExceptT $ Q.runTx pool (Q.Serializable, Nothing) $ do
+ Q.catchE defaultTxErrorHandler initStateTx
+ sc <- buildSchemaCache
+ (,) sc <$> GS.mkGCtxMap (scTables sc)
+ either initErrExit return pgResp >>= newIORef
+
+ cacheLock <- lift $ newMVar ()
+
+ let serverCtx = ServerCtx isoLevel pool logger cacheRef cacheLock mode
+
+ liftIO $ putStrLn "HasuraDB is now waiting for connections"
+
+ maybe (return ()) (middleware . MS.staticPolicy . MS.addBase) mRootDir
+
+ -- cors middleware
+ unless (ccDisabled corsCfg) $
+ middleware $ corsMiddleware (mkDefaultCorsPolicy $ ccDomain corsCfg)
+
+ get root $ html $ T.pack landingPage
+ get ("v1/template" /> var) $ tmpltGetOrDeleteH serverCtx
+ post ("v1/template" /> var) $ tmpltPutOrPostH serverCtx
+ put ("v1/template" /> var) $ tmpltPutOrPostH serverCtx
+ delete ("v1/template" /> var) $ tmpltGetOrDeleteH serverCtx
+
+ post "v1/query" $ mkSpockAction encodeQErr serverCtx $ do
+ query <- parseBody
+ v1QueryHandler query
+
+ post "v1/query/explain" $ mkSpockAction encodeQErr serverCtx $ do
+ expQuery <- parseBody
+ v1ExplainHandler expQuery
+
+ post "v1alpha1/graphql" $ mkSpockAction GE.encodeGQErr serverCtx $ do
+ query <- parseBody
+ v1Alpha1GQHandler query
+
+ -- get "v1alpha1/graphql/schema" $
+ -- mkSpockAction encodeQErr serverCtx v1Alpha1GQSchemaHandler
+
+ post ("api/1/table" /> var /> var) $ \tableName queryType ->
+ mkSpockAction encodeQErr serverCtx $ legacyQueryHandler (TableName tableName) queryType
+
+ hookAny GET $ \_ -> mkSpockAction encodeQErr serverCtx $
+ throw404 NotFound "resource does not exist"
+
+ where
+ tmpltGetOrDeleteH serverCtx tmpltName = do
+ tmpltArgs <- tmpltArgsFromQueryParams
+ mkSpockAction encodeQErr serverCtx $ mkQTemplateAction tmpltName tmpltArgs
+
+ tmpltPutOrPostH serverCtx tmpltName = do
+ tmpltArgs <- tmpltArgsFromQueryParams
+ mkSpockAction encodeQErr serverCtx $ do
+ bodyTmpltArgs <- parseBody
+ mkQTemplateAction tmpltName $ M.union bodyTmpltArgs tmpltArgs
+
+ tmpltArgsFromQueryParams = do
+ qparams <- params
+ return $ M.fromList $ flip map qparams $
+ \(a, b) -> (TemplateParam a, String b)
+
+ mkQTemplateAction tmpltName tmpltArgs =
+ v1QueryHandler $ RQExecuteQueryTemplate $
+ ExecQueryTemplate (TQueryName tmpltName) tmpltArgs
diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs
new file mode 100644
index 00000000..7e493f43
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Init.hs
@@ -0,0 +1,260 @@
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.Server.Init where
+
+import qualified Database.PG.Query as Q
+
+import Network.URI
+import Options.Applicative
+import System.Exit (exitFailure)
+import Text.Read (readMaybe)
+
+import qualified Data.Text as T
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Utils
+
+data InitError
+ = InitError !String
+ deriving (Show, Eq)
+
+instance Q.FromPGConnErr InitError where
+ fromPGConnErr = InitError . show
+
+instance Q.FromPGTxErr InitError where
+ fromPGTxErr = InitError . show
+
+type AccessKey = T.Text
+
+initErrExit :: (Show e) => e -> IO a
+initErrExit e = print e >> exitFailure
+
+-- clear the hdb_views schema
+initStateTx :: Q.Tx ()
+initStateTx = Q.unitQ clearHdbViews () False
+
+data RawConnInfo =
+ RawConnInfo
+ { connHost :: !(Maybe String)
+ , connPort :: !(Maybe Int)
+ , connUser :: !(Maybe String)
+ , connPassword :: !String
+ , connUrl :: !(Maybe String)
+ , connDatabase :: !(Maybe String)
+ , connOptions :: !(Maybe String)
+ } deriving (Eq, Read, Show)
+
+parseRawConnInfo :: Parser RawConnInfo
+parseRawConnInfo =
+ RawConnInfo
+ <$> optional (strOption ( long "host" <>
+ metavar "HOST" <>
+ help "Postgres server host" ))
+ <*> optional (option auto ( long "port" <>
+ short 'p' <>
+ metavar "PORT" <>
+ help "Postgres server port" ))
+ <*> optional (strOption ( long "user" <>
+ short 'u' <>
+ metavar "USER" <>
+ help "Database user name" ))
+ <*> strOption ( long "password" <>
+ short 'p' <>
+ metavar "PASSWORD" <>
+ value "" <>
+ help "Password of the user" )
+ <*> optional (strOption ( long "database-url" <>
+ metavar "DataBase-URL" <>
+ help "Postgres database URL"))
+ <*> optional (strOption ( long "dbname" <>
+ short 'd' <>
+ metavar "NAME" <>
+ help "Database name to connect to" ))
+ <*> pure Nothing
+
+connInfoErrModifier :: String -> String
+connInfoErrModifier s = "Fatal Error : " ++ s
+
+mkConnInfo :: RawConnInfo -> Either String Q.ConnInfo
+mkConnInfo (RawConnInfo mHost mPort mUser pass mURL mDB opts) =
+ case (mHost, mPort, mUser, mDB, mURL) of
+ (Just host, Just port, Just user, Just db, Nothing) ->
+ return $ Q.ConnInfo host port user pass db opts
+ _ -> throwError "expecting all options for host, port, user and db"
+ -- (_, _, _, _, Just dbURL) -> parseURL dbURL mUser pass mPort mDB opts
+ -- _ -> throwError
+ -- "Invalid options. Expecting database connection params or database-url"
+
+-- parseURL
+-- :: String
+-- -> Maybe String
+-- -> String
+-- -> Maybe Int
+-- -> Maybe String
+-- -> Maybe String
+-- -> Either String Q.ConnInfo
+-- parseURL urlS mUser password mPort mDB opts =
+-- case parseURI urlS of
+-- Nothing -> throwError "database-url is not valid"
+-- Just url -> do
+-- let uriAuth = uriAuthority url
+-- p = uriPath url
+-- parseURIAuth p uriAuth
+-- where
+-- parseURIAuth _ Nothing = throwError "Authorization info not found in database-url"
+-- parseURIAuth pt (Just authInfo) = do
+-- mDbName <- parsePath pt
+-- dbName <- case mDbName of
+-- Nothing -> case mDB of
+-- Nothing -> throwError
+-- "DB name not found in database-url. Expecting --dbname or -d flag"
+-- Just d -> return d
+-- Just d -> return d
+
+-- (user, pass) <- parseUserInfo $ uriUserInfo authInfo
+-- let host = uriRegName authInfo
+-- port <- case parsePort $ uriPort authInfo of
+-- Just p -> return p
+-- Nothing -> case mPort of
+-- Nothing -> throwError
+-- "Port not found in datbase-url. Expecting --port or -p flag"
+-- Just p -> return p
+-- return $ Q.ConnInfo host port user pass dbName opts
+
+-- parsePort "" = Nothing
+-- parsePort s = readMaybe $ tail s
+
+-- parsePath "" = return Nothing
+-- parsePath "/" = return Nothing
+-- parsePath s = do
+-- let l = T.splitOn "/" $ T.pack s
+-- case l of
+-- [] -> return Nothing
+-- [_] -> return Nothing
+-- [_, b] -> return $ Just $ T.unpack b
+-- _ -> throwError $
+-- "Invalid URL path. Expecting / in URL path."
+-- ++ " Occured " ++ s
+
+-- parseUserInfo ui =
+ -- let userI = init ui
+ -- (user, pass) = break (==':') userI
+ -- mNewPass = if null pass || (pass == ":")
+ -- then Nothing
+ -- else Just $ tail pass
+ -- newUser <- case user of
+ -- "" -> case mUser of
+ -- Nothing -> throwError
+ -- "User not found in database-url. Expecting --user or -u flag"
+ -- Just u -> return u
+ -- u -> return u
+ -- newPass <- case mNewPass of
+ -- Nothing -> return password
+ -- Just p -> return p
+ -- return (newUser, newPass)
+
+parseConnInfo :: Parser Q.ConnInfo
+parseConnInfo =
+ Q.ConnInfo
+ <$> strOption ( long "host" <>
+ metavar "HOST" <>
+ help "Postgres server host" )
+ <*> option auto ( long "port" <>
+ short 'p' <>
+ metavar "PORT" <>
+ help "Postgres server port" )
+ <*> strOption ( long "user" <>
+ short 'u' <>
+ metavar "USER" <>
+ help "Database user name" )
+ <*> strOption ( long "password" <>
+ short 'p' <>
+ metavar "PASSWORD" <>
+ value "" <>
+ help "Password of the user" )
+ <*> strOption ( long "dbname" <>
+ short 'd' <>
+ metavar "NAME" <>
+ help "Database name to connect to" )
+ <*> pure Nothing
+
+readIsoLevel :: String -> Either String Q.TxIsolation
+readIsoLevel isoS =
+ case isoS of
+ "read-comitted" -> return Q.ReadCommitted
+ "repeatable-read" -> return Q.RepeatableRead
+ "serializable" -> return Q.ReadCommitted
+ _ -> Left "Only expecting read-comitted / repeatable-read / serializable"
+
+parseTxIsolation :: Parser Q.TxIsolation
+parseTxIsolation =
+ option (eitherReader readIsoLevel) ( long "tx-iso" <>
+ short 'i' <>
+ value Q.ReadCommitted <>
+ metavar "TXISO" <>
+ help "transaction isolation. read-committed / repeatable-read / serializable" )
+
+parseRootDir :: Parser (Maybe String)
+parseRootDir =
+ optional $ strOption ( long "root-dir" <>
+ metavar "STATIC-DIR" <>
+ help "this static dir is served at / and takes precedence over all routes" )
+
+parseConnParams :: Parser Q.ConnParams
+parseConnParams =
+ Q.ConnParams
+ <$> option auto ( long "stripes" <>
+ short 's' <>
+ metavar "NO OF STRIPES" <>
+ value 1 <>
+ help "Number of stripes" )
+ <*> option auto ( long "connections" <>
+ short 'c' <>
+ metavar "NO OF CONNS" <>
+ value 50 <>
+ help "Number of conns that need to be opened to Postgres" )
+ <*> option auto ( long "timeout" <>
+ short 'c' <>
+ metavar "SECONDS" <>
+ value 180 <>
+ help "Each connection's idle time before it is closed" )
+
+parseServerPort :: Parser Int
+parseServerPort =
+ option auto ( long "server-port" <>
+ metavar "PORT" <>
+ value 8080 <>
+ showDefault <>
+ help "Port on which graphql-engine should be served")
+
+parseAccessKey :: Parser (Maybe AccessKey)
+parseAccessKey = optional $ strOption ( long "access-key" <>
+ metavar "SECRET ACCESS KEY" <>
+ help "Secret access key, required to access this instance"
+ )
+
+data CorsConfig
+ = CorsConfig
+ { ccDomain :: !T.Text
+ , ccDisabled :: !Bool
+ } deriving (Show, Eq)
+
+parseCorsConfig :: Parser CorsConfig
+parseCorsConfig =
+ CorsConfig
+ <$> strOption ( long "cors-domain" <>
+ metavar "CORS DOMAIN" <>
+ value "*" <>
+ showDefault <>
+ help "The domain, including scheme and port, to allow CORS for"
+ )
+ <*> switch ( long "disable-cors" <>
+ help "Disable CORS handling"
+ )
+
+parseWebHook :: Parser (Maybe T.Text)
+parseWebHook = optional $ strOption ( long "auth-hook" <>
+ metavar "AUTHENTICATION WEB HOOK" <>
+ help "The authentication webhook, required to authenticate requests"
+ )
diff --git a/server/src-lib/Hasura/Server/Logging.hs b/server/src-lib/Hasura/Server/Logging.hs
new file mode 100644
index 00000000..6ac2ff45
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Logging.hs
@@ -0,0 +1,235 @@
+{-# LANGUAGE FlexibleInstances #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TypeSynonymInstances #-}
+
+-- This is taken from wai-logger and customised for our use
+
+module Hasura.Server.Logging
+ ( ServerLogger
+ , withStdoutLogger
+ , ServerLog(..)
+ , LogDetail(..)
+ , LogDetailG
+ , getRequestHeader
+ ) where
+
+import Control.Exception (bracket)
+import Data.Aeson
+import Data.Bits (shift, (.&.))
+import Data.ByteString.Char8 (ByteString)
+import Data.Int (Int64)
+import Data.List (find)
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Data.Text.Encoding.Error as TE
+import qualified Data.Text.Lazy as TL
+import Data.Time.Clock
+import Data.Word (Word32)
+import Network.Socket (SockAddr (..))
+import Network.Wai (Request (..))
+import System.ByteOrder (ByteOrder (..), byteOrder)
+import System.Log.FastLogger
+import Text.Printf (printf)
+
+import qualified Data.ByteString.Char8 as BS
+import qualified Data.CaseInsensitive as CI
+import qualified Data.HashMap.Strict as M
+import qualified Network.HTTP.Types as N
+
+import Hasura.Server.Utils
+import Hasura.Prelude
+
+
+data ServerLog
+ = ServerLog
+ { slStatus :: !N.Status
+ , slMethod :: !T.Text
+ , slSource :: !T.Text
+ , slPath :: !T.Text
+ , slTimestamp :: !T.Text
+ , slHttpVersion :: !N.HttpVersion
+ , slDetail :: !(Maybe Value)
+ , slRequestId :: !(Maybe T.Text)
+ -- , slHasuraId :: !(Maybe T.Text)
+ , slHasuraRole :: !(Maybe T.Text)
+ , slHasuraMetadata :: !(Maybe Value)
+ , slQueryHash :: !(Maybe T.Text)
+ , slResponseSize :: !(Maybe Int64)
+ , slResponseTime :: !(Maybe T.Text)
+ } deriving (Show, Eq)
+
+instance ToJSON ServerLog where
+ toJSON (ServerLog st met src path ts hv det reqId hRole hMd qh rs rt) =
+ object [ "status" .= N.statusCode st
+ , "method" .= met
+ , "ip" .= src
+ , "url" .= path
+ , "timestamp" .= ts
+ , "http_version" .= show hv
+ , "detail" .= det
+ , "request_id" .= reqId
+ -- , "hasura_id" .= hId
+ , "hasura_role" .= hRole
+ , "hasura_metadata" .= hMd
+ , "query_hash" .= qh
+ , "response_size" .= rs
+ -- , "response_time" .= rt
+ , "query_execution_time" .= rt
+ ]
+
+data LogDetail
+ = LogDetail
+ { ldQuery :: !TL.Text
+ , ldError :: !Value
+ } deriving (Show, Eq)
+
+instance ToJSON LogDetail where
+ toJSON (LogDetail q e) =
+ object [ "request" .= q
+ , "error" .= e
+ ]
+
+-- type ServerLogger = Request -> BL.ByteString -> Either QErr BL.ByteString -> IO ()
+type ServerLogger r = Request -> r -> Maybe (UTCTime, UTCTime) -> IO ()
+
+type LogDetailG r = Request -> r -> (N.Status, Maybe Value, Maybe T.Text, Maybe Int64)
+
+withStdoutLogger :: LogDetailG r -> (ServerLogger r -> IO a) -> IO a
+withStdoutLogger detailF appf =
+ bracket setup teardown $ \(rlogger, _) -> appf rlogger
+ where
+ setup = do
+ getter <- newTimeCache "%FT%T%z"
+ lgrset <- newStdoutLoggerSet defaultBufSize
+ let logger req env timeT = do
+ zdata <- getter
+ let serverLog = mkServerLog detailF zdata req env timeT
+ pushLogStrLn lgrset $ toLogStr $ encode serverLog
+ when (isJust $ slDetail serverLog) $ flushLogStr lgrset
+ remover = rmLoggerSet lgrset
+ return (logger, remover)
+ teardown (_, remover) = void remover
+
+mkServerLog
+ :: LogDetailG r
+ -> FormattedTime
+ -> Request
+ -> r
+ -> Maybe (UTCTime, UTCTime)
+ -> ServerLog
+mkServerLog detailF tmstr req r mTimeT =
+ ServerLog
+ { slStatus = status
+ , slMethod = decodeBS $ requestMethod req
+ , slSource = decodeBS $ getSourceFromFallback req
+ , slPath = decodeBS $ rawPathInfo req
+ , slTimestamp = decodeBS tmstr
+ , slHttpVersion = httpVersion req
+ , slDetail = mDetail
+ , slRequestId = decodeBS <$> getRequestId req
+ -- , slHasuraId = decodeBS <$> getHasuraId req
+ , slHasuraRole = decodeBS <$> getHasuraRole req
+ , slHasuraMetadata = getHasuraMetadata req
+ , slResponseSize = size
+ , slResponseTime = T.pack . show <$> diffTime
+ , slQueryHash = queryHash
+ }
+ where
+ (status, mDetail, queryHash, size) = detailF req r
+ diffTime = case mTimeT of
+ Nothing -> Nothing
+ Just (t1, t2) -> Just $ diffUTCTime t2 t1
+
+decodeBS :: BS.ByteString -> T.Text
+decodeBS = TE.decodeUtf8With TE.lenientDecode
+
+getSourceFromSocket :: Request -> ByteString
+getSourceFromSocket = BS.pack . showSockAddr . remoteHost
+
+getSourceFromFallback :: Request -> ByteString
+getSourceFromFallback req = fromMaybe (getSourceFromSocket req) $ getSource req
+
+getSource :: Request -> Maybe ByteString
+getSource req = addr
+ where
+ maddr = find (\x -> fst x `elem` ["x-real-ip", "x-forwarded-for"]) hdrs
+ addr = fmap snd maddr
+ hdrs = requestHeaders req
+
+requestIdHeader :: T.Text
+requestIdHeader = "x-request-id"
+
+getRequestId :: Request -> Maybe ByteString
+getRequestId = getRequestHeader $ TE.encodeUtf8 requestIdHeader
+
+getHasuraRole :: Request -> Maybe ByteString
+getHasuraRole = getRequestHeader $ TE.encodeUtf8 userRoleHeader
+
+getRequestHeader :: ByteString -> Request -> Maybe ByteString
+getRequestHeader hdrName req = snd <$> mHeader
+ where
+ mHeader = find (\h -> fst h == CI.mk hdrName) hdrs
+ hdrs = requestHeaders req
+
+newtype HasuraMetadata
+ = HasuraMetadata { unHM :: M.HashMap T.Text T.Text } deriving (Show)
+
+instance ToJSON HasuraMetadata where
+ toJSON hash = toJSON $ M.fromList $ map (\(k,v) -> (format k, v)) hdrs
+ where
+ hdrs = M.toList $ unHM hash
+ format = T.map underscorify . T.drop 2
+ underscorify '-' = '_'
+ underscorify c = c
+
+getHasuraMetadata :: Request -> Maybe Value
+getHasuraMetadata req = case md of
+ [] -> Nothing
+ _ -> Just $ toJSON $ HasuraMetadata (M.fromList md)
+ where
+ md = filter filterFixedHeaders rawMd
+ filterFixedHeaders (h,_) = h /= userRoleHeader && h /= accessKeyHeader
+ rawMd = filter (\h -> "x-hasura-" `T.isInfixOf` fst h) hdrs
+ hdrs = map hdrToTxt $ requestHeaders req
+ hdrToTxt (k, v) = (T.toLower $ decodeBS $ CI.original k, decodeBS v)
+
+-- | A type for IP address in numeric string representation.
+type NumericAddress = String
+
+showIPv4 :: Word32 -> Bool -> NumericAddress
+showIPv4 w32 little
+ | little = show b1 ++ "." ++ show b2 ++ "." ++ show b3 ++ "." ++ show b4
+ | otherwise = show b4 ++ "." ++ show b3 ++ "." ++ show b2 ++ "." ++ show b1
+ where
+ t1 = w32
+ t2 = shift t1 (-8)
+ t3 = shift t2 (-8)
+ t4 = shift t3 (-8)
+ b1 = t1 .&. 0x000000ff
+ b2 = t2 .&. 0x000000ff
+ b3 = t3 .&. 0x000000ff
+ b4 = t4 .&. 0x000000ff
+
+showIPv6 :: (Word32,Word32,Word32,Word32) -> String
+showIPv6 (w1,w2,w3,w4) =
+ printf "%x:%x:%x:%x:%x:%x:%x:%x" s1 s2 s3 s4 s5 s6 s7 s8
+ where
+ (s1,s2) = split16 w1
+ (s3,s4) = split16 w2
+ (s5,s6) = split16 w3
+ (s7,s8) = split16 w4
+ split16 w = (h1,h2)
+ where
+ h1 = shift w (-16) .&. 0x0000ffff
+ h2 = w .&. 0x0000ffff
+
+-- | Convert 'SockAddr' to 'NumericAddress'. If the address is an
+-- IPv4-embedded IPv6 address, the IPv4 part is extracted.
+showSockAddr :: SockAddr -> NumericAddress
+-- HostAddr is network byte order.
+showSockAddr (SockAddrInet _ addr4) = showIPv4 addr4 (byteOrder == LittleEndian)
+-- HostAddr6 is host byte order.
+showSockAddr (SockAddrInet6 _ _ (0,0,0x0000ffff,addr4) _) = showIPv4 addr4 False
+showSockAddr (SockAddrInet6 _ _ (0,0,0,1) _) = "::1"
+showSockAddr (SockAddrInet6 _ _ addr6 _) = showIPv6 addr6
+showSockAddr _ = "unknownSocket"
diff --git a/server/src-lib/Hasura/Server/Middleware.hs b/server/src-lib/Hasura/Server/Middleware.hs
new file mode 100644
index 00000000..02564296
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Middleware.hs
@@ -0,0 +1,74 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.Server.Middleware where
+
+import Data.Maybe (fromMaybe)
+import Network.Wai
+
+import Hasura.Prelude
+import Hasura.Server.Logging (getRequestHeader)
+
+import qualified Data.ByteString as B
+import qualified Data.CaseInsensitive as CI
+import qualified Data.Text as T
+import qualified Data.Text.Encoding as TE
+import qualified Network.HTTP.Types as H
+
+
+data CorsPolicy
+ = CorsPolicy
+ { cpDomain :: !T.Text
+ , cpMethods :: ![T.Text]
+ , cpMaxAge :: !Int
+ } deriving (Show, Eq)
+
+
+mkDefaultCorsPolicy :: T.Text -> CorsPolicy
+mkDefaultCorsPolicy domain =
+ CorsPolicy
+ { cpDomain = domain
+ , cpMethods = ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"]
+ , cpMaxAge = 1728000
+ }
+
+
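+-- | CORS middleware: requests without an Origin header pass through
+-- untouched, disallowed origins get no CORS headers, OPTIONS
+-- pre-flight requests are answered directly, and every other response
+-- has the CORS headers injected.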
+corsMiddleware :: CorsPolicy -> Middleware
+corsMiddleware policy app req sendResp =
+ maybe (app req sendResp) handleCors $ getRequestHeader "Origin" req
+
+ where
+ handleCors origin
+ | cpDomain policy /= "*" && origin /= TE.encodeUtf8 (cpDomain policy) = app req sendResp
+ | otherwise =
+ case requestMethod req of
+ "OPTIONS" -> sendResp $ respondPreFlight origin
+ _ -> app req $ sendResp . injectCorsHeaders origin
+
+ respondPreFlight :: B.ByteString -> Response
+ respondPreFlight origin =
+ setHeaders (mkPreFlightHeaders requestedHeaders)
+ $ injectCorsHeaders origin emptyResponse
+
+ emptyResponse = responseLBS H.status204 [] ""
+ requestedHeaders =
+ fromMaybe "" $ getRequestHeader "Access-Control-Request-Headers" req
+
+ injectCorsHeaders :: B.ByteString -> Response -> Response
+ injectCorsHeaders origin = setHeaders (mkCorsHeaders origin)
+
+ mkPreFlightHeaders allowReqHdrs =
+ [ ("Access-Control-Max-Age", "1728000")
+ , ("Access-Control-Allow-Headers", allowReqHdrs)
+ , ("Content-Length", "0")
+ , ("Content-Type", "text/plain charset=UTF-8")
+ ]
+
+ mkCorsHeaders origin =
+ [ ("Access-Control-Allow-Origin", origin)
+ , ("Access-Control-Allow-Credentials", "true")
+ , ("Access-Control-Allow-Methods",
+ B.intercalate "," $ TE.encodeUtf8 <$> cpMethods policy)
+ ]
+
+ setHeaders hdrs = mapResponseHeaders (\h -> mkRespHdrs hdrs ++ h)
+ mkRespHdrs = map (\(k,v) -> (CI.mk k, v))
diff --git a/server/src-lib/Hasura/Server/Query.hs b/server/src-lib/Hasura/Server/Query.hs
new file mode 100644
index 00000000..bc1262cc
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Query.hs
@@ -0,0 +1,248 @@
+{-# LANGUAGE DeriveLift #-}
+{-# LANGUAGE FlexibleContexts #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE TemplateHaskell #-}
+
+module Hasura.Server.Query where
+
+import Data.Aeson
+import Data.Aeson.Casing
+import Data.Aeson.TH
+import Language.Haskell.TH.Syntax (Lift)
+
+import qualified Data.ByteString.Builder as BB
+import qualified Data.ByteString.Lazy as BL
+import qualified Data.Sequence as Seq
+import qualified Data.Text as T
+import qualified Data.Vector as V
+
+import Hasura.Prelude
+import Hasura.RQL.DDL.Metadata
+import Hasura.RQL.DDL.Permission
+import Hasura.RQL.DDL.QueryTemplate
+import Hasura.RQL.DDL.Relationship
+import Hasura.RQL.DDL.Schema.Table
+import Hasura.RQL.DML.Explain
+import Hasura.RQL.DML.QueryTemplate
+import Hasura.RQL.DML.Returning (encodeJSONVector)
+import Hasura.RQL.Types
+import Hasura.Server.Utils
+import Hasura.SQL.Types
+
+import qualified Database.PG.Query as Q
+
+-- data QueryWithTxId
+-- = QueryWithTxId
+-- { qtTxId :: !(Maybe TxId)
+-- , qtQuery :: !RQLQuery
+-- } deriving (Show, Eq)
+
+-- instance FromJSON QueryWithTxId where
+-- parseJSON v@(Object o) =
+-- QueryWithTxId
+-- <$> o .:! "transaction_id"
+-- <*> parseJSON v
+-- parseJSON _ =
+-- fail "expecting on object for query"
+
+data RQLQuery
+ = RQAddExistingTableOrView !TrackTable
+ | RQTrackTable !TrackTable
+ | RQUntrackTable !UntrackTable
+
+ | RQCreateObjectRelationship !CreateObjRel
+ | RQCreateArrayRelationship !CreateArrRel
+ | RQDropRelationship !DropRel
+ | RQSetRelationshipComment !SetRelComment
+
+ | RQCreateInsertPermission !CreateInsPerm
+ | RQCreateSelectPermission !CreateSelPerm
+ | RQCreateUpdatePermission !CreateUpdPerm
+ | RQCreateDeletePermission !CreateDelPerm
+
+ | RQDropInsertPermission !DropInsPerm
+ | RQDropSelectPermission !DropSelPerm
+ | RQDropUpdatePermission !DropUpdPerm
+ | RQDropDeletePermission !DropDelPerm
+ | RQSetPermissionComment !SetPermComment
+
+ | RQInsert !InsertQuery
+ | RQSelect !SelectQuery
+ | RQUpdate !UpdateQuery
+ | RQDelete !DeleteQuery
+ | RQCount !CountQuery
+ | RQBulk ![RQLQuery]
+
+ | RQCreateQueryTemplate !CreateQueryTemplate
+ | RQDropQueryTemplate !DropQueryTemplate
+ | RQExecuteQueryTemplate !ExecQueryTemplate
+ | RQSetQueryTemplateComment !SetQueryTemplateComment
+
+ | RQRunSql !RunSQL
+
+ | RQReplaceMetadata !ReplaceMetadata
+ | RQExportMetadata !ExportMetadata
+ | RQClearMetadata !ClearMetadata
+
+ | RQDumpInternalState !DumpInternalState
+
+ deriving (Show, Eq, Lift)
+
+$(deriveJSON
+ defaultOptions { constructorTagModifier = snakeCase . drop 2
+ , sumEncoding = TaggedObject "type" "args"
+ }
+ ''RQLQuery)
+
+buildTx
+ :: (HDBQuery q)
+ => UserInfo
+ -> SchemaCache
+ -> q
+ -> Either QErr (Q.TxE QErr (BL.ByteString, SchemaCache))
+buildTx userInfo sc q = do
+ p1Res <- withPathK "args" $ runP1 qEnv $ phaseOne q
+ return $ flip runReaderT (qcUserInfo qEnv) $
+ flip runStateT sc $ withPathK "args" $ phaseTwo q p1Res
+ where
+ qEnv = QCtx userInfo sc
+
+runQuery
+ :: (MonadIO m, MonadError QErr m)
+ => Q.PGPool -> Q.TxIsolation
+ -> UserInfo -> SchemaCache
+ -> RQLQuery -> m (BL.ByteString, SchemaCache)
+runQuery pool isoL userInfo sc query = do
+ tx <- liftEither $ buildTxAny userInfo sc query
+ Q.runTx pool (isoL, Nothing) $ setHeadersTx userInfo >> tx
+
+buildExplainTx
+ :: UserInfo
+ -> SchemaCache
+ -> SelectQuery
+ -> Either QErr (Q.TxE QErr BL.ByteString)
+buildExplainTx userInfo sc q = do
+ p1Res <- withPathK "query" $ runP1 qEnv $ phaseOneExplain q
+ res <- return $ flip runReaderT (qcUserInfo qEnv) $
+ flip runStateT sc $ withPathK "query" $ phaseTwoExplain p1Res
+ return $ fst <$> res
+ where
+ qEnv = QCtx userInfo sc
+
+runExplainQuery
+ :: (MonadIO m, MonadError QErr m)
+ => Q.PGPool -> Q.TxIsolation
+ -> UserInfo -> SchemaCache
+ -> SelectQuery -> m BL.ByteString
+runExplainQuery pool isoL userInfo sc query = do
+ tx <- liftEither $ buildExplainTx userInfo sc query
+ Q.runTx pool (isoL, Nothing) $ setHeadersTx userInfo >> tx
+
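+-- | True if executing the query may change the schema, in which case
+-- the caller must rebuild the schema cache and GraphQL context under
+-- the cache lock.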
+queryNeedsReload :: RQLQuery -> Bool
+queryNeedsReload qi = case qi of
+ RQAddExistingTableOrView q -> queryModifiesSchema q
+ RQTrackTable q -> queryModifiesSchema q
+ RQUntrackTable q -> queryModifiesSchema q
+
+ RQCreateObjectRelationship q -> queryModifiesSchema q
+ RQCreateArrayRelationship q -> queryModifiesSchema q
+ RQDropRelationship q -> queryModifiesSchema q
+ RQSetRelationshipComment q -> queryModifiesSchema q
+
+ RQCreateInsertPermission q -> queryModifiesSchema q
+ RQCreateSelectPermission q -> queryModifiesSchema q
+ RQCreateUpdatePermission q -> queryModifiesSchema q
+ RQCreateDeletePermission q -> queryModifiesSchema q
+
+ RQDropInsertPermission q -> queryModifiesSchema q
+ RQDropSelectPermission q -> queryModifiesSchema q
+ RQDropUpdatePermission q -> queryModifiesSchema q
+ RQDropDeletePermission q -> queryModifiesSchema q
+ RQSetPermissionComment q -> queryModifiesSchema q
+
+ RQInsert q -> queryModifiesSchema q
+ RQSelect q -> queryModifiesSchema q
+ RQUpdate q -> queryModifiesSchema q
+ RQDelete q -> queryModifiesSchema q
+ RQCount q -> queryModifiesSchema q
+
+ RQCreateQueryTemplate q -> queryModifiesSchema q
+ RQDropQueryTemplate q -> queryModifiesSchema q
+ RQExecuteQueryTemplate q -> queryModifiesSchema q
+ RQSetQueryTemplateComment q -> queryModifiesSchema q
+
+ RQRunSql q -> queryModifiesSchema q
+
+ RQReplaceMetadata q -> queryModifiesSchema q
+ RQExportMetadata q -> queryModifiesSchema q
+ RQClearMetadata q -> queryModifiesSchema q
+
+ RQDumpInternalState q -> queryModifiesSchema q
+
+ RQBulk qs -> any queryNeedsReload qs
+
+buildTxAny :: UserInfo
+ -> SchemaCache
+ -> RQLQuery
+ -> Either QErr (Q.TxE QErr (BL.ByteString, SchemaCache))
+buildTxAny userInfo sc rq = case rq of
+ RQAddExistingTableOrView q -> buildTx userInfo sc q
+ RQTrackTable q -> buildTx userInfo sc q
+ RQUntrackTable q -> buildTx userInfo sc q
+
+ RQCreateObjectRelationship q -> buildTx userInfo sc q
+ RQCreateArrayRelationship q -> buildTx userInfo sc q
+ RQDropRelationship q -> buildTx userInfo sc q
+ RQSetRelationshipComment q -> buildTx userInfo sc q
+
+ RQCreateInsertPermission q -> buildTx userInfo sc q
+ RQCreateSelectPermission q -> buildTx userInfo sc q
+ RQCreateUpdatePermission q -> buildTx userInfo sc q
+ RQCreateDeletePermission q -> buildTx userInfo sc q
+
+ RQDropInsertPermission q -> buildTx userInfo sc q
+ RQDropSelectPermission q -> buildTx userInfo sc q
+ RQDropUpdatePermission q -> buildTx userInfo sc q
+ RQDropDeletePermission q -> buildTx userInfo sc q
+ RQSetPermissionComment q -> buildTx userInfo sc q
+
+ RQInsert q -> buildTx userInfo sc q
+ RQSelect q -> buildTx userInfo sc q
+ RQUpdate q -> buildTx userInfo sc q
+ RQDelete q -> buildTx userInfo sc q
+ RQCount q -> buildTx userInfo sc q
+
+ RQCreateQueryTemplate q -> buildTx userInfo sc q
+ RQDropQueryTemplate q -> buildTx userInfo sc q
+ RQExecuteQueryTemplate q -> buildTx userInfo sc q
+ RQSetQueryTemplateComment q -> buildTx userInfo sc q
+
+ RQReplaceMetadata q -> buildTx userInfo sc q
+ RQClearMetadata q -> buildTx userInfo sc q
+ RQExportMetadata q -> buildTx userInfo sc q
+
+ RQDumpInternalState q -> buildTx userInfo sc q
+
+ RQRunSql q -> buildTx userInfo sc q
+
+ RQBulk qs ->
+ let f (respList, scf) q = do
+ dbAction <- liftEither $ buildTxAny userInfo scf q
+ (resp, newSc) <- dbAction
+ return ((Seq.|>) respList resp, newSc)
+ in
+ return $ withPathK "args" $ do
+ (respList, finalSc) <- indexedFoldM f (Seq.empty, sc) qs
+ let bsVector = V.fromList $ toList respList
+ return ( BB.toLazyByteString $ encodeJSONVector BB.lazyByteString bsVector
+ , finalSc
+ )
+
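+-- | Install each user header as a transaction-local setting
+-- (SET LOCAL hasura.<name> = '<value>'), presumably so that generated
+-- SQL can read them back via current_setting.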
+setHeadersTx :: UserInfo -> Q.TxE QErr ()
+setHeadersTx userInfo =
+ forM_ hdrs $ \h -> Q.unitQE defaultTxErrorHandler (mkQ h) () False
+ where
+ hdrs = userHeaders userInfo
+ mkQ (h, v) = Q.fromBuilder $ BB.string7 $
+ T.unpack $
+ "SET LOCAL hasura." <> dropAndSnakeCase h <> " = " <> pgFmtLit v
diff --git a/server/src-lib/Hasura/Server/Utils.hs b/server/src-lib/Hasura/Server/Utils.hs
new file mode 100644
index 00000000..0ec2940d
--- /dev/null
+++ b/server/src-lib/Hasura/Server/Utils.hs
@@ -0,0 +1,24 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Hasura.Server.Utils where
+
+import qualified Data.Text as T
+import Hasura.Prelude
+
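+-- | Strip the nine-character "x-hasura-" prefix and turn dashes into
+-- underscores, e.g. "X-Hasura-User-Id" becomes "user_id".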
+dropAndSnakeCase :: T.Text -> T.Text
+dropAndSnakeCase = T.drop 9 . toSnakeCase . T.toLower
+
+toSnakeCase :: T.Text -> T.Text
+toSnakeCase = T.pack . map change . T.unpack
+ where
+ change '-' = '_'
+ change c = c
+
+isXHasuraTxt :: T.Text -> Bool
+isXHasuraTxt = T.isInfixOf "x-hasura-" . T.toLower
+
+userRoleHeader :: T.Text
+userRoleHeader = "x-hasura-role"
+
+accessKeyHeader :: T.Text
+accessKeyHeader = "x-hasura-access-key"
diff --git a/server/src-rsr/first_last.sql b/server/src-rsr/first_last.sql
new file mode 100644
index 00000000..38fbc740
--- /dev/null
+++ b/server/src-rsr/first_last.sql
@@ -0,0 +1,25 @@
+-- Create a function that always returns the first non-NULL item
+CREATE OR REPLACE FUNCTION hdb_catalog.first_agg ( anyelement, anyelement )
+RETURNS anyelement LANGUAGE SQL IMMUTABLE STRICT AS $$
+ SELECT $1;
+$$;
+
+-- And then wrap an aggregate around it
+CREATE AGGREGATE hdb_catalog.FIRST (
+ sfunc = hdb_catalog.first_agg,
+ basetype = anyelement,
+ stype = anyelement
+);
+
+-- Create a function that always returns the last non-NULL item
+CREATE OR REPLACE FUNCTION hdb_catalog.last_agg ( anyelement, anyelement )
+RETURNS anyelement LANGUAGE SQL IMMUTABLE STRICT AS $$
+ SELECT $2;
+$$;
+
+-- And then wrap an aggregate around it
+CREATE AGGREGATE hdb_catalog.LAST (
+ sfunc = hdb_catalog.last_agg,
+ basetype = anyelement,
+ stype = anyelement
+);
diff --git a/server/src-rsr/hdb_metadata.yaml b/server/src-rsr/hdb_metadata.yaml
new file mode 100644
index 00000000..1cf75b1f
--- /dev/null
+++ b/server/src-rsr/hdb_metadata.yaml
@@ -0,0 +1,181 @@
+type: bulk
+args:
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_table
+
+- type: add_existing_table_or_view
+ args:
+ schema: information_schema
+ name: tables
+
+- type: create_object_relationship
+ args:
+ name: detail
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: information_schema
+ name: tables
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: information_schema
+ name: schemata
+
+- type: add_existing_table_or_view
+ args:
+ schema: information_schema
+ name: views
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_primary_key
+
+- type: create_object_relationship
+ args:
+ name: primary_key
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_primary_key
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: information_schema
+ name: columns
+
+- type: create_array_relationship
+ args:
+ name: columns
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: information_schema
+ name: columns
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_foreign_key_constraint
+
+- type: create_array_relationship
+ args:
+ name: foreign_key_constraints
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_foreign_key_constraint
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_relationship
+
+- type: create_array_relationship
+ args:
+ name: relationships
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_relationship
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_permission_agg
+
+- type: create_array_relationship
+ args:
+ name: permissions
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_permission_agg
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_check_constraint
+
+- type: create_array_relationship
+ args:
+ name: check_constraints
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_check_constraint
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_unique_constraint
+
+- type: create_array_relationship
+ args:
+ name: unique_constraints
+ table:
+ schema: hdb_catalog
+ name: hdb_table
+ using:
+ manual_configuration:
+ remote_table:
+ schema: hdb_catalog
+ name: hdb_unique_constraint
+ column_mapping:
+ table_schema : table_schema
+ table_name : table_name
+
+- type: add_existing_table_or_view
+ args:
+ schema: hdb_catalog
+ name: hdb_query_template
diff --git a/server/src-rsr/initialise.sql b/server/src-rsr/initialise.sql
new file mode 100644
index 00000000..97135af8
--- /dev/null
+++ b/server/src-rsr/initialise.sql
@@ -0,0 +1,181 @@
+CREATE TABLE hdb_catalog.hdb_version (
+ version TEXT NOT NULL,
+ upgraded_on TIMESTAMPTZ NOT NULL
+);
+
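+-- A unique index on a constant expression guarantees hdb_version can
+-- hold at most one row.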
+CREATE UNIQUE INDEX hdb_version_one_row
+ON hdb_catalog.hdb_version((version IS NOT NULL));
+
+CREATE TABLE hdb_catalog.hdb_table
+(
+ table_schema TEXT,
+ table_name TEXT,
+ is_system_defined boolean default false,
+
+ PRIMARY KEY (table_schema, table_name)
+);
+
+CREATE FUNCTION hdb_catalog.hdb_table_oid_check() RETURNS trigger AS
+$function$
+ BEGIN
+ IF (EXISTS (SELECT 1 FROM information_schema.tables st WHERE st.table_schema = NEW.table_schema AND st.table_name = NEW.table_name)) THEN
+ return NEW;
+ ELSE
+ RAISE foreign_key_violation using message = 'table_schema, table_name not in information_schema.tables';
+ return NULL;
+ END IF;
+ END;
+$function$
+LANGUAGE plpgsql;
+
+CREATE TRIGGER hdb_table_oid_check BEFORE INSERT OR UPDATE ON hdb_catalog.hdb_table
+ FOR EACH ROW EXECUTE PROCEDURE hdb_catalog.hdb_table_oid_check();
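+-- information_schema.tables is a view, so hdb_table cannot declare a real
+-- foreign key against it; the trigger above emulates one by raising
+-- foreign_key_violation when the named table does not exist.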
+
+CREATE TABLE hdb_catalog.hdb_relationship
+(
+ table_schema TEXT,
+ table_name TEXT,
+ rel_name TEXT,
+ rel_type TEXT CHECK (rel_type IN ('object', 'array')),
+ rel_def JSONB NOT NULL,
+ comment TEXT NULL,
+ is_system_defined boolean default false,
+
+ PRIMARY KEY (table_schema, table_name, rel_name),
+ FOREIGN KEY (table_schema, table_name) REFERENCES hdb_catalog.hdb_table(table_schema, table_name)
+);
+
+CREATE TABLE hdb_catalog.hdb_permission
+(
+ table_schema TEXT,
+ table_name TEXT,
+ role_name TEXT,
+ perm_type TEXT CHECK(perm_type IN ('insert', 'select', 'update', 'delete')),
+ perm_def JSONB NOT NULL,
+ comment TEXT NULL,
+ is_system_defined boolean default false,
+
+ PRIMARY KEY (table_schema, table_name, role_name, perm_type),
+ FOREIGN KEY (table_schema, table_name) REFERENCES hdb_catalog.hdb_table(table_schema, table_name)
+);
+
+CREATE VIEW hdb_catalog.hdb_permission_agg AS
+SELECT
+ table_schema,
+ table_name,
+ role_name,
+ json_object_agg(perm_type, perm_def) as permissions
+FROM
+ hdb_catalog.hdb_permission
+GROUP BY
+ table_schema, table_name, role_name;
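+-- Each row aggregates one role's permissions on a table into a single
+-- JSON object keyed by perm_type, e.g. {"select": {...}, "update": {...}}.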
+
+CREATE TABLE hdb_catalog.hdb_query_template
+(
+ template_name TEXT PRIMARY KEY,
+ template_defn JSONB NOT NULL,
+ comment TEXT NULL,
+ is_system_defined boolean default false
+);
+
+CREATE VIEW hdb_catalog.hdb_foreign_key_constraint AS
+SELECT
+ q.table_schema :: text,
+ q.table_name :: text,
+ q.constraint_name :: text,
+ hdb_catalog.first(q.constraint_oid) :: integer as constraint_oid,
+ hdb_catalog.first(q.ref_table_table_schema) :: text as ref_table_table_schema,
+ hdb_catalog.first(q.ref_table) :: text as ref_table,
+ json_object_agg(ac.attname, afc.attname) as column_mapping,
+ hdb_catalog.first(q.confupdtype) :: text as on_update,
+ hdb_catalog.first(q.confdeltype) :: text as on_delete
+FROM
+ (SELECT
+ ctn.nspname AS table_schema,
+ ct.relname AS table_name,
+ r.conrelid AS table_id,
+ r.conname as constraint_name,
+ r.oid as constraint_oid,
+ cftn.nspname AS ref_table_table_schema,
+ cft.relname as ref_table,
+ r.confrelid as ref_table_id,
+ r.confupdtype,
+ r.confdeltype,
+ UNNEST (r.conkey) AS column_id,
+ UNNEST (r.confkey) AS ref_column_id
+ FROM
+ pg_catalog.pg_constraint r
+ JOIN pg_catalog.pg_class ct
+ ON r.conrelid = ct.oid
+ JOIN pg_catalog.pg_namespace ctn
+ ON ct.relnamespace = ctn.oid
+ JOIN pg_catalog.pg_class cft
+ ON r.confrelid = cft.oid
+ JOIN pg_catalog.pg_namespace cftn
+ ON cft.relnamespace = cftn.oid
+ WHERE
+ r.contype = 'f'
+ ) q
+ JOIN pg_catalog.pg_attribute ac
+ ON q.column_id = ac.attnum
+ AND q.table_id = ac.attrelid
+ JOIN pg_catalog.pg_attribute afc
+ ON q.ref_column_id = afc.attnum
+ AND q.ref_table_id = afc.attrelid
+GROUP BY q.table_schema, q.table_name, q.constraint_name;
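+-- Note: hdb_catalog.first is assumed to be a first-value aggregate defined
+-- elsewhere in the catalog setup; after the conkey/confkey UNNEST every
+-- row of a constraint carries the same value, so first() just picks it.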
+
+CREATE VIEW hdb_catalog.hdb_check_constraint AS
+SELECT
+ n.nspname :: text AS table_schema,
+ ct.relname :: text AS table_name,
+ r.conname :: text as constraint_name,
+ pg_catalog.pg_get_constraintdef(r.oid, true) as check
+FROM
+ pg_catalog.pg_constraint r
+ JOIN pg_catalog.pg_class ct
+ ON r.conrelid = ct.oid
+ JOIN pg_catalog.pg_namespace n
+ ON ct.relnamespace = n.oid
+WHERE
+ r.contype = 'c';
+
+CREATE VIEW hdb_catalog.hdb_unique_constraint AS
+SELECT
+ tc.table_name,
+ tc.constraint_schema AS table_schema,
+ tc.constraint_name as constraint_name,
+ json_agg(kcu.column_name) AS columns
+FROM
+ information_schema.table_constraints tc
+ JOIN information_schema.key_column_usage AS kcu
+ USING (constraint_schema, constraint_name)
+WHERE
+ constraint_type = 'UNIQUE'
+GROUP BY
+ tc.table_name, tc.constraint_schema, tc.constraint_name;
+
+CREATE VIEW hdb_catalog.hdb_primary_key AS
+SELECT
+ tc.table_schema,
+ tc.table_name,
+ tc.constraint_name,
+ json_agg(ccu.column_name) as columns
+FROM
+ information_schema.table_constraints tc
+ JOIN information_schema.constraint_column_usage ccu
+ ON tc.constraint_name = ccu.constraint_name
+WHERE
+ constraint_type = 'PRIMARY KEY'
+GROUP BY
+ tc.table_schema, tc.table_name, tc.constraint_name;
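+-- Caveat: the join above is on constraint_name alone (not qualified by
+-- schema), so it relies on primary-key constraint names being unique
+-- across the schemas being tracked.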
+
+CREATE FUNCTION hdb_catalog.inject_table_defaults(view_schema text, view_name text, tab_schema text, tab_name text) RETURNS void
+LANGUAGE plpgsql AS $$
+ DECLARE
+ r RECORD;
+ BEGIN
+ FOR r IN SELECT column_name, column_default FROM information_schema.columns WHERE table_schema = tab_schema AND table_name = tab_name AND column_default IS NOT NULL LOOP
+ EXECUTE format('ALTER VIEW %I.%I ALTER COLUMN %I SET DEFAULT %s;', view_schema, view_name, r.column_name, r.column_default);
+ END LOOP;
+ END;
+$$;
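+-- Example usage (hypothetical names): copy the column defaults of
+-- public.author onto a view defined over it:
+--   SELECT hdb_catalog.inject_table_defaults('public', 'author_view', 'public', 'author');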
diff --git a/server/src-rsr/landing_page.html b/server/src-rsr/landing_page.html
new file mode 100644
index 00000000..603b00e1
--- /dev/null
+++ b/server/src-rsr/landing_page.html
@@ -0,0 +1,449 @@
+<!-- 449 lines of HTML/CSS markup lost in extraction; only the page's text content survives: -->
+Hi! Your GraphQL endpoint on Postgres is ready.
+Now, start building your schema and exploring your GraphQL APIs:
+
+Step 1: Install the Hasura CLI
+Mac / Linux:
+    curl -L https://cli.hasura.io/install.sh | bash
+Windows:
+    - Download the hasura installer for 64-bit or 32-bit.
+    - Run the hasura command in your shell (recommended: git-bash).
+
+Step 2: Initialize a project
+
+Step 3: Open the Hasura Console
+    cd my-project && hasura console
diff --git a/server/src-rsr/schema.graphql b/server/src-rsr/schema.graphql
new file mode 100644
index 00000000..880d08ef
--- /dev/null
+++ b/server/src-rsr/schema.graphql
@@ -0,0 +1,99 @@
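+# The standard GraphQL introspection schema (__Schema, __Type, __Field,
+# etc.) as given in the GraphQL specification, plus the five built-in
+# scalars at the end of the file.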
+type __Schema {
+ types: [__Type!]!
+ queryType: __Type!
+ mutationType: __Type
+ subscriptionType: __Type
+ directives: [__Directive!]!
+}
+
+type __Type {
+ kind: __TypeKind!
+ name: String
+ description: String
+
+ # OBJECT and INTERFACE only
+ fields(includeDeprecated: Boolean = false): [__Field!]
+
+ # OBJECT only
+ interfaces: [__Type!]
+
+ # INTERFACE and UNION only
+ possibleTypes: [__Type!]
+
+ # ENUM only
+ enumValues(includeDeprecated: Boolean = false): [__EnumValue!]
+
+ # INPUT_OBJECT only
+ inputFields: [__InputValue!]
+
+ # NON_NULL and LIST only
+ ofType: __Type
+}
+
+type __Field {
+ name: String!
+ description: String
+ args: [__InputValue!]!
+ type: __Type!
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+type __InputValue {
+ name: String!
+ description: String
+ type: __Type!
+ defaultValue: String
+}
+
+type __EnumValue {
+ name: String!
+ description: String
+ isDeprecated: Boolean!
+ deprecationReason: String
+}
+
+enum __TypeKind {
+ SCALAR
+ OBJECT
+ INTERFACE
+ UNION
+ ENUM
+ INPUT_OBJECT
+ LIST
+ NON_NULL
+}
+
+type __Directive {
+ name: String!
+ description: String
+ locations: [__DirectiveLocation!]!
+ args: [__InputValue!]!
+}
+
+enum __DirectiveLocation {
+ QUERY
+ MUTATION
+ SUBSCRIPTION
+ FIELD
+ FRAGMENT_DEFINITION
+ FRAGMENT_SPREAD
+ INLINE_FRAGMENT
+ SCHEMA
+ SCALAR
+ OBJECT
+ FIELD_DEFINITION
+ ARGUMENT_DEFINITION
+ INTERFACE
+ UNION
+ ENUM
+ ENUM_VALUE
+ INPUT_OBJECT
+ INPUT_FIELD_DEFINITION
+}
+
+scalar Int
+scalar Float
+scalar String
+scalar Boolean
+scalar ID
diff --git a/server/stack-nightly.yaml b/server/stack-nightly.yaml
new file mode 100644
index 00000000..88315265
--- /dev/null
+++ b/server/stack-nightly.yaml
@@ -0,0 +1,46 @@
+# For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration.html
+
+# Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)
+# resolver: lts-10.8
+resolver: nightly-2018-06-27
+
+# Local packages, usually specified by relative directory name
+packages:
+- '.'
+# - '../../../graphql-parser-hs'
+# - extra-libs/aeson
+# - extra-libs/logger/wai-logger
+
+# Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)
+extra-deps:
+# - graphql-api-0.3.0
+- git: git@github.com:hasura/pg-client-hs.git
+ commit: 77995388cab656f9180b851f33f3d603cf1017c7
+- git: git@github.com:hasura/graphql-parser-hs.git
+ commit: 59426f985a68a71cef566fe4ee11ae3b11deaa65
+- Spock-core-0.13.0.0
+- reroute-0.5.0.0
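+# Spock-core and reroute are pinned explicitly here, presumably because the
+# nightly snapshot above does not include them (the lts-11.15 stack.yaml
+# resolves them from its snapshot instead).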
+
+# Override default flag values for local packages and extra-deps
+flags: {}
+
+# Extra package databases containing global packages
+extra-package-dbs: []
+
+# Control whether we use the GHC we find on the path
+# system-ghc: true
+
+# Require a specific version of stack, using version ranges
+# require-stack-version: -any # Default
+# require-stack-version: >= 1.0.0
+
+# Override the architecture used by stack, especially useful on Windows
+# arch: i386
+# arch: x86_64
+
+# Extra directories used by stack for building
+# extra-include-dirs: [/path/to/dir]
+# extra-lib-dirs: [/path/to/dir]
+
+# Allow a newer minor version of GHC than the snapshot specifies
+# compiler-check: newer-minor
diff --git a/server/stack.yaml b/server/stack.yaml
new file mode 100644
index 00000000..29bb84dd
--- /dev/null
+++ b/server/stack.yaml
@@ -0,0 +1,44 @@
+# For more information, see: http://docs.haskellstack.org/en/stable/yaml_configuration.html
+
+# Specifies the GHC version and set of packages available (e.g., lts-3.5, nightly-2015-09-21, ghc-7.10.2)
+# resolver: lts-10.8
+resolver: lts-11.15
+
+# Local packages, usually specified by relative directory name
+packages:
+- '.'
+# - '../../../graphql-parser-hs'
+# - extra-libs/aeson
+# - extra-libs/logger/wai-logger
+
+# Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)
+extra-deps:
+# - graphql-api-0.3.0
+- git: git@github.com:hasura/pg-client-hs.git
+ commit: 77995388cab656f9180b851f33f3d603cf1017c7
+- git: git@github.com:hasura/graphql-parser-hs.git
+ commit: 59426f985a68a71cef566fe4ee11ae3b11deaa65
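+# Note: ssh-style URLs (git@github.com:...) require an SSH key with access
+# to GitHub; for anonymous builds use the equivalent
+# https://github.com/hasura/... URLs.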
+
+# Override default flag values for local packages and extra-deps
+flags: {}
+
+# Extra package databases containing global packages
+extra-package-dbs: []
+
+# Control whether we use the GHC we find on the path
+# system-ghc: true
+
+# Require a specific version of stack, using version ranges
+# require-stack-version: -any # Default
+# require-stack-version: >= 1.0.0
+
+# Override the architecture used by stack, especially useful on Windows
+# arch: i386
+# arch: x86_64
+
+# Extra directories used by stack for building
+# extra-include-dirs: [/path/to/dir]
+# extra-lib-dirs: [/path/to/dir]
+
+# Allow a newer minor version of GHC than the snapshot specifies
+# compiler-check: newer-minor
diff --git a/server/test/Main.hs b/server/test/Main.hs
new file mode 100644
index 00000000..4743525e
--- /dev/null
+++ b/server/test/Main.hs
@@ -0,0 +1,275 @@
+{-# LANGUAGE OverloadedStrings #-}
+
+module Main where
+import Data.Bool
+
+import Control.Monad.Trans.Except
+
+import System.Exit (exitFailure)
+
+import Control.Monad
+
+import Data.List as L
+
+import Data.Aeson as A hiding (Options)
+
+import Data.Graph.Inductive.Basic as DB
+import Data.Graph.Inductive.Graph as G hiding (mkGraph)
+import Data.Graph.Inductive.PatriciaTree as GT
+import Data.Graph.Inductive.Query.DFS as DFS
+
+import Data.HashMap.Strict as HM
+
+import Network.Wai.Test (SResponse(..))
+
+import Test.Hspec
+import Test.Hspec.Core.Runner
+import Test.Hspec.Core.Spec hiding (runIO)
+import Test.Hspec.Wai
+
+import qualified Data.Text as T
+
+import qualified Data.ByteString.Char8 as BS
+import qualified Data.ByteString.Builder as BB
+import qualified Data.ByteString.Lazy.Char8 as BL
+
+import System.Directory
+import System.Environment (withArgs)
+import System.FilePath.Posix
+
+import Hasura.Server.Init
+import Hasura.Prelude
+import Hasura.Server.App (ravenLogGen, RavenLogger, app, AuthMode(..))
+import Hasura.Server.Logging (withStdoutLogger)
+import qualified Database.PG.Query as Q
+
+
+import qualified Database.PG.Query as PGQ
+
+import Web.Spock.Core (spockT, spockAsApp)
+
+import qualified Network.HTTP.Types as H
+import Network.Wai (Application)
+
+import Options.Applicative hiding (action)
+
+defTxMode :: Q.TxMode
+defTxMode = (Q.Serializable, Nothing)
+
+data TestItem
+ = TestItem
+ {
+ itRequest :: !Object,
+ itResponse :: !(Maybe Value),
+ itStatusCode :: !Int,
+ itRole :: !String,
+ itUserID :: !String,
+ itUrl :: !String,
+ itMethod :: !String,
+ itName :: !String,
+ itSQLCheck :: !(Maybe [String]),
+ itPreSQL :: !(Maybe [String]),
+ itPostSQL :: !(Maybe [String])
+ } deriving (Show, Eq)
+
+instance FromJSON TestItem where
+ parseJSON (Object v) =
+ TestItem <$> v .: "request"
+ <*> v .:? "response"
+ <*> v .: "status_code"
+ <*> v .: "role"
+ <*> v .: "user_id"
+ <*> v .: "url"
+ <*> v .: "method"
+ <*> v .: "name"
+ <*> v .:? "sql_check"
+ <*> v .:? "pre_sql"
+ <*> v .:? "post_sql"
+
+ parseJSON _ = fail "Expected an object for TestItem"
+
+data Dependency = Dependency !FilePath !(Maybe [String])
+ deriving (Show, Eq)
+
+instance FromJSON Dependency where
+ parseJSON (String s) =
+ return $ Dependency (T.unpack s) Nothing
+ parseJSON (Object o) =
+ Dependency <$> o .: "filename"
+ <*> o .:? "names"
+ parseJSON _ = fail "Expected a string or an object for Dependency"
+
+data TestCase
+ = TestCase
+ {
+ itDescription :: !String,
+ tcItems :: ![TestItem],
+ tcDepends :: ![Dependency]
+ } deriving (Show, Eq)
+
+instance FromJSON TestCase where
+ parseJSON (Object v) =
+ TestCase <$> v .: "description"
+ <*> v .: "items"
+ <*> v .: "depends"
+ parseJSON _ = fail "Expected an object for TestCase"
+
+
+type GraphInfo = (HashMap FilePath Int, GT.Gr FilePath ())
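+-- A GraphInfo pairs a map from testcase file paths to graph node ids with
+-- the dependency graph itself; an edge points from a testcase to each file
+-- it depends on.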
+
+depToFp :: Dependency -> FilePath
+depToFp (Dependency x _) = x
+
+fileToTestCase :: FilePath -> IO TestCase
+fileToTestCase fp = do
+ contents <- BL.readFile fp
+ either fail return $ A.eitherDecode contents
+
+consGraph :: GraphInfo -> FilePath -> IO GraphInfo
+consGraph (nodeMap, graph) fp = do
+ tc <- fileToTestCase fp
+
+ let node = (case HM.lookup fp nodeMap of
+ Nothing -> 1 + HM.size nodeMap
+ Just x -> x)
+
+ depends = L.map (combine (takeDirectory fp) . depToFp) $ tcDepends tc
+
+ (nodeMap', graph') <- foldM consGraph (HM.insert fp node nodeMap, graph) depends
+
+ let outLinks = L.map (nodeMap' !) depends
+ ctxt = ([], node, fp, zip (repeat ()) outLinks)
+
+ return $ (nodeMap', ctxt & graph')
+
+
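+-- Fold every testcase file (and, recursively, its dependencies) into a
+-- single dependency graph, starting from an empty GraphInfo.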
+mkGraph :: [FilePath] -> IO GraphInfo
+mkGraph = foldM consGraph (HM.empty, G.empty)
+
+-- | Topological sort of the nodes reachable from the given node, returning only their labels.
+topsortFrom :: (Graph gr) => Node -> gr a b -> [a]
+topsortFrom node = reverse . postorderF . dffWith go [node] where
+ go (_, _, label, _) = label
+
+hasura_req :: TestItem -> WaiSession SResponse
+hasura_req ti = request method url [
+ ("x-hasura-role", role),
+ ("x-hasura-user-id", userID)] body
+ where
+ method = BS.pack $ itMethod ti
+ role = BS.pack $ itRole ti
+ userID = BS.pack $ itUserID ti
+ url = BS.pack $ itUrl ti
+ body = A.encode $ itRequest ti
+
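+-- Run a pre/post SQL fixture statement, discarding any error: the result
+-- of runExceptT is ignored, so a failing statement does not abort the run.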
+runSQL :: Q.PGPool -> String -> IO ()
+runSQL pool queryStmt = do
+ let q = Q.withQE PGQ.PGExecErrTx (Q.fromBuilder $ BB.stringUtf8 queryStmt) () False
+ _ <- runExceptT $ Q.runTx pool defTxMode q :: (IO (Either PGQ.PGExecErr Q.Discard))
+ return ()
+
+checkSQL :: Q.PGPool -> String -> IO Bool
+checkSQL pool queryStmt = do
+ let q = Q.withQE PGQ.PGExecErrTx (Q.fromBuilder $ BB.stringUtf8 queryStmt) () False
+ res <- liftIO $ runExceptT $ Q.runTx pool defTxMode q
+ case res of
+ Left x -> print x >> return False
+ Right (Q.WithCount n (Q.Discard _)) -> return $ n == 1
+
+matchTestItem :: WaiSession SResponse -> TestItem -> Q.PGPool -> WaiExpectation
+matchTestItem action ti pool = do
+ liftIO $ case (itPreSQL ti) of
+ Nothing -> return ()
+ Just sqlExps -> do
+ forM_ sqlExps (runSQL pool)
+
+ r <- action
+
+ liftIO $ (H.statusCode $ simpleStatus r) `shouldBe` itStatusCode ti
+
+ case itResponse ti of
+ Nothing -> return ()
+ Just resp -> do
+ jsonBody <- (either error return $ A.eitherDecode $ simpleBody r) :: WaiSession A.Value
+ let
+ this = BL.unpack $ A.encode jsonBody
+ that = BL.unpack $ A.encode resp
+
+ if jsonBody == resp then return ()
+ else liftIO . expectationFailure $ "For " ++ (itName ti) ++ "\nGot\n" ++ this ++ "\ninstead of\n" ++ that
+
+ liftIO $ case (itPostSQL ti) of
+ Nothing -> return ()
+ Just sqlExps -> do
+ forM_ sqlExps (runSQL pool)
+
+ liftIO $ case (itSQLCheck ti) of
+ Nothing -> return ()
+ Just sqlExps -> do
+ ress <- forM sqlExps (checkSQL pool)
+ mapM_ (\(i, res) -> bool
+ (expectationFailure ("SQL check " ++ show i ++ " failed"))
+ (return ()) res) $ zip [(1 :: Int)..] ress
+
+mkSpecLeaf :: Q.PGPool -> String -> [TestItem] -> [TestItem] -> SpecTree (Arg WaiExpectation)
+mkSpecLeaf pool tcDesc tis depTis = specItem tcDesc $ do
+ mapM_ hasura_req depTis
+
+ foldM (\_ ti -> matchTestItem (hasura_req ti) ti pool) () tis
+
+mkSpecList :: Q.PGPool -> TestCase -> [TestCase] -> SpecTree (Arg WaiExpectation)
+mkSpecList pool (TestCase desc items _) deps = do
+ let depItems = L.concatMap tcItems deps
+ mkSpecLeaf pool desc items depItems
+
+resetStateTx :: Q.TxE PGQ.PGExecErr ()
+resetStateTx = do
+ Q.unitQE PGQ.PGExecErrTx "DROP SCHEMA hdb_catalog CASCADE" () False
+ Q.unitQE PGQ.PGExecErrTx "DROP SCHEMA raven CASCADE" () False
+ Q.unitQE PGQ.PGExecErrTx "DROP SCHEMA raven_views CASCADE" () False
+ Q.unitQE PGQ.PGExecErrTx "DROP SCHEMA public CASCADE" () False
+ Q.unitQE PGQ.PGExecErrTx "CREATE SCHEMA public" () False
+
+raven_app :: RavenLogger -> PGQ.PGPool -> IO Application
+raven_app rlogger pool =
+ do
+ _ <- liftIO $ runExceptT $ Q.runTx pool defTxMode resetStateTx
+ let corsCfg = CorsConfig "*" True -- cors is disabled
+ spockAsApp $ spockT id $ app Q.Serializable Nothing rlogger pool AMNoAuth corsCfg -- no access key and no webhook
+
+main :: IO ()
+main = withStdoutLogger ravenLogGen $ \rlogger -> do
+ Options rci cp args <- parseArgs
+
+ ci <- either ((>> exitFailure) . (putStrLn . connInfoErrModifier))
+ return $ mkConnInfo rci
+
+ pool <- Q.initPGPool ci cp
+
+ files <- forM args makeAbsolute
+
+ (nodeMap, graph) <- mkGraph files
+
+ specs <- forM files $ \fp -> do
+ let node = nodeMap ! fp
+ depFiles = L.reverse $ tail $ topsortFrom node graph
+
+ tc <- fileToTestCase fp
+ tcs <- forM depFiles fileToTestCase
+
+ return $ mkSpecList pool tc tcs
+
+ withArgs [] $ hspecWith defaultConfig $ with (raven_app rlogger pool) $ fromSpecList specs
+
+
+data Options = Options RawConnInfo Q.ConnParams [FilePath]
+
+parseArgs :: IO Options
+parseArgs = execParser opts
+ where
+ optParser = Options <$> parseRawConnInfo <*> parseConnParams <*> some (
+ argument str (metavar "FILES..."))
+
+ opts = info (helper <*> optParser)
+ ( fullDesc <>
+ header "raven-test")
diff --git a/server/testcases/add_column.json b/server/testcases/add_column.json
new file mode 100644
index 00000000..cf86de7a
--- /dev/null
+++ b/server/testcases/add_column.json
@@ -0,0 +1,49 @@
+
+{
+ "description": "add column test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "add column",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "sql_check": [
+ "SELECT 1 where EXISTS (select * from information_schema.columns where table_name = 'author' AND column_name='age')"
+ ],
+ "request": {
+ "kind": "add_column",
+ "body" : {
+ "table": "author",
+ "column": {
+ "name": "age",
+ "type": "integer"
+ }
+ }
+ }
+ },
+ {
+ "name": "add column",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "add_column",
+ "body" : {
+ "table": "author",
+ "column": {
+ "name": "id",
+ "type": "integer"
+ }
+ }
+ }
+ }
+
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/add_existing_table.json b/server/testcases/add_existing_table.json
new file mode 100644
index 00000000..b1bfcb8c
--- /dev/null
+++ b/server/testcases/add_existing_table.json
@@ -0,0 +1,42 @@
+{
+ "description": "schema test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "pre_sql": [
+ "CREATE TABLE test(id serial);"
+ ],
+
+ "post_sql": [
+ "DROP TABLE test;"
+ ],
+
+ "request": {
+ "kind": "add_existing_table",
+ "body": {
+ "table": "test"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "add_existing_table",
+ "body": {
+ "table": "test"
+ }
+ }
+ }
+ ]
+}
diff --git a/server/testcases/add_existing_view.json b/server/testcases/add_existing_view.json
new file mode 100644
index 00000000..2f2f3ae5
--- /dev/null
+++ b/server/testcases/add_existing_view.json
@@ -0,0 +1,42 @@
+{
+ "description": "add existing view",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "pre_sql": [
+ "CREATE TABLE test(id serial)",
+ "CREATE VIEW test_view AS (SELECT id FROM test WHERE id > 10)"
+ ],
+ "request": {
+ "kind": "add_existing_view",
+ "body": {
+ "view": "test_view"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "response": {
+ "path": "$",
+ "error": "view already tracked"
+ },
+ "user_id": "1",
+ "request": {
+ "kind": "add_existing_view",
+ "body": {
+ "view": "test_view"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/alter_col_nullable.json b/server/testcases/alter_col_nullable.json
new file mode 100644
index 00000000..c47d2747
--- /dev/null
+++ b/server/testcases/alter_col_nullable.json
@@ -0,0 +1,84 @@
+{
+ "description": "alter column nullable test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "b",
+ "nullable": false
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "insert with null",
+ "url": "/api/1/table/a/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {}
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Not-NULL violation. null value in column \"b\" violates not-null constraint"
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "alter_column_nullable",
+ "body": {
+ "table": "a",
+ "nullable": true,
+ "column": "b"
+ }
+ }
+ },
+ {
+ "name": "insert with null (should fail)",
+ "url": "/api/1/table/a/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {}
+ ]
+ },
+ "response": {
+ "affected_rows": 1
+ },
+ "method": "POST"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/alter_column_default.json b/server/testcases/alter_column_default.json
new file mode 100644
index 00000000..303944c6
--- /dev/null
+++ b/server/testcases/alter_column_default.json
@@ -0,0 +1,53 @@
+{
+ "description": "alter defaults test",
+ "depends": [],
+ "items": [
+ {
+ "name": "alter_column_default",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body":
+ {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "timetz",
+ "name": "ts_sql",
+ "default_sql": "now()"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "response": {"path":"$","error":"Expecting ISO8601 formatted time"},
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "alter_column_default",
+ "body":
+ {
+ "table": "a",
+ "new_default": "now()",
+ "column": "ts_sql"
+ }
+ }
+ }
+ ]
+ }
\ No newline at end of file
diff --git a/server/testcases/alter_column_type.json b/server/testcases/alter_column_type.json
new file mode 100644
index 00000000..be5fe586
--- /dev/null
+++ b/server/testcases/alter_column_type.json
@@ -0,0 +1,69 @@
+{
+ "description": "alter column type test",
+ "depends": [],
+ "items": [
+ {
+ "name": "alter_column_default",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body":
+ {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "ts_sql",
+ "default_sql": "now()"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "alter_column_type",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "alter_column_type",
+ "body":
+ {
+ "table": "a",
+ "new_type": "text",
+ "column": "ts_sql"
+ }
+ }
+ },
+ {
+ "name": "alter_column_type nonexistent",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "alter_column_type",
+ "body":
+ {
+ "table": "a",
+ "new_type": "text",
+ "column": "blah"
+ }
+ }
+ }
+ ]
+ }
\ No newline at end of file
diff --git a/server/testcases/author_article_category.json b/server/testcases/author_article_category.json
new file mode 100644
index 00000000..33581608
--- /dev/null
+++ b/server/testcases/author_article_category.json
@@ -0,0 +1,199 @@
+{
+ "description": "author article category schema",
+ "depends": [],
+ "items": [
+ {
+ "name": "create author table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "author",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "varchar",
+ "name": "email"
+ },
+ {
+ "type": "varchar",
+ "name": "name"
+ },
+ {
+ "type": "integer",
+ "name": "auth_id"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create category table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "category",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "text",
+ "name": "description"
+ },
+ {
+ "type": "varchar",
+ "name": "name"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create article table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "article",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "numeric",
+ "name": "rating"
+ },
+ {
+ "type": "text",
+ "name": "title"
+ },
+ {
+ "type": "text",
+ "name": "content"
+ },
+ {
+ "type": "integer",
+ "references": {
+ "column": "id",
+ "table": "author"
+ },
+ "name": "author_id"
+ },
+ {
+ "type": "integer",
+ "references": {
+ "column": "id",
+ "table": "category"
+ },
+ "name": "category_id"
+ },
+ {
+ "type": "boolean",
+ "name": "is_published"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create author->article",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_object_relationship",
+ "body": {
+ "using": "author_id",
+ "name": "author",
+ "table": "article"
+ }
+ }
+ },
+ {
+ "name": "create article->category",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_object_relationship",
+ "body": {
+ "using": "category_id",
+ "name": "category",
+ "table": "article"
+ }
+ }
+ },
+ {
+ "name": "create author->articles",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_array_relationship",
+ "body": {
+ "using": {
+ "column": "author_id",
+ "table": "article"
+ },
+ "name": "articles",
+ "table": "author"
+ }
+ }
+ },
+ {
+ "name": "create category->articles",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_array_relationship",
+ "body": {
+ "using": {
+ "column": "category_id",
+ "table": "article"
+ },
+ "name": "articles",
+ "table": "category"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/changeset_qerr.json b/server/testcases/changeset_qerr.json
new file mode 100644
index 00000000..decc71d5
--- /dev/null
+++ b/server/testcases/changeset_qerr.json
@@ -0,0 +1,40 @@
+{
+ "description": "changeset qerr test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "add column 1",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "add_column",
+ "body": {
+ "__type": "add_column",
+ "table": "author",
+ "column": "age"
+ }
+ }
+ },
+ {
+ "name": "add column 2 (check for lockup)",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "add_column",
+ "body": {
+ "__type": "add_column",
+ "table": "author",
+ "column": "age"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/check_author_table_permissions.json b/server/testcases/check_author_table_permissions.json
new file mode 100644
index 00000000..95cb99b4
--- /dev/null
+++ b/server/testcases/check_author_table_permissions.json
@@ -0,0 +1,220 @@
+{
+ "description": "check author table permissions",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create author as user",
+ "url": "/api/1/query",
+ "role": "user",
+ "user_id": "1",
+ "response": {"path":"$","error":"insert on \"author\" for role \"user\" is not allowed. "},
+ "status_code": 400,
+ "request": {
+ "kind": "insert",
+ "body": {
+ "table": "author",
+ "objects": [
+ {
+ "name": "Balaji",
+ "auth_id": 1,
+ "email": "google@balaji.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ }
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create author B1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "B1",
+ "auth_id": 1,
+ "email": "google@gmail.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create author B2 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "B2",
+ "auth_id": 2,
+ "email": "google@balaji.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 2
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "get author as admin",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "kind": "select",
+ "body": {
+ "table": "author",
+ "columns": [
+ "name"
+ ]
+ }
+ },
+ "response": [
+ {
+ "name": "B1"
+ },
+ {
+ "name": "B2"
+ }
+ ],
+ "method": "POST"
+ },
+ {
+ "name": "select as user id 1",
+ "url": "/api/1/query",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "kind": "select",
+ "body": {
+ "table": "author",
+ "columns": [
+ "name"
+ ]
+ }
+ },
+ "response": [
+ {
+ "name": "B1"
+ }
+ ],
+ "method": "POST"
+ },
+ {
+ "name": "select as user id 2",
+ "url": "/api/1/query",
+ "role": "user",
+ "user_id": "2",
+ "status_code": 200,
+ "request": {
+ "kind": "select",
+ "body": {
+ "table": "author",
+ "columns": [
+ "name"
+ ]
+ }
+ },
+ "response": [
+ {
+ "name": "B2"
+ }
+ ],
+ "method": "POST"
+ },
+ {
+ "name": "update B1 as user 1",
+ "url": "/api/1/table/author/update",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "response": {
+ "affected_rows": 1
+ },
+ "request": {
+ "where": {
+ "name": "B1"
+ },
+ "$set": {
+ "name": "B1 (new)"
+ }
+ }
+ },
+ {
+ "name": "update B1 as user 2",
+ "url": "/api/1/table/author/update",
+ "role": "user",
+ "user_id": "2",
+ "status_code": 200,
+ "method": "POST",
+ "response": {
+ "affected_rows": 0
+ },
+ "request": {
+ "where": {
+ "name": "B1"
+ },
+ "$set": {
+ "name": "B1 sucks"
+ }
+ }
+ },
+ {
+ "name": "update email as user",
+ "url": "/api/1/query",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 400,
+ "response": {
+ "path": "$.$set",
+ "error": "role \"user\" does not have permission to update column \"email\""
+ },
+ "method": "POST",
+ "request": {
+ "kind": "update",
+ "body": {
+ "table": "author",
+ "where": {
+ "name": "B1"
+ },
+ "$set": {
+ "email": "B1@gmail.com"
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/check_constraint.json b/server/testcases/check_constraint.json
new file mode 100644
index 00000000..df98984f
--- /dev/null
+++ b/server/testcases/check_constraint.json
@@ -0,0 +1,269 @@
+{
+ "description": "check constraint test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "name": "user",
+ "columns": [
+ {
+ "type": "varchar",
+ "name": "name"
+ },
+ {
+ "type": "serial",
+ "name": "id"
+ }
+ ],
+ "primary_key": [
+ "id"
+ ],
+ "unique_constraints": [
+ {
+ "columns": [
+ "id",
+ "name"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "__type": "create_table",
+ "name": "author",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "varchar",
+ "name": "email"
+ },
+ {
+ "type": "varchar",
+ "name": "name"
+ },
+ {
+ "type": "integer",
+ "name": "auth_id"
+ }
+ ],
+ "primary_key": [
+ "id"
+ ],
+ "foreign_key_constraints": [
+ {
+ "mapping": {
+ "auth_id": "id",
+ "name": "name"
+ },
+ "columns": [
+ "auth_id",
+ "name"
+ ],
+ "ref_table": "user",
+ "ref_columns": [
+ "id",
+ "name"
+ ]
+ }
+ ],
+ "check_constraints": [
+ {
+ "check": {
+ "$or": [
+ {
+ "email": {
+ "$like": "%@gmail.com"
+ }
+ }
+ ]
+ }
+ }
+ ],
+ "unique_constraints": [
+ {
+ "columns": [
+ "name",
+ "email"
+ ]
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create user Spock as admin",
+ "url": "/api/1/table/user/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "Spock",
+ "id": 1
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create user Vader as admin",
+ "url": "/api/1/table/user/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "Vader",
+ "id": 2
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 2
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create author Spock as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "Spock",
+ "auth_id": 1,
+ "email": "spock@gmail.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create author Vader as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "name": "Vader",
+ "auth_id": 1,
+ "email": "vader@b56.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "method": "POST",
+ "response": {
+ "path": "$",
+ "error": "Check constraint violation. new row for relation \"author\" violates check constraint \"author_email_check\""
+ }
+ },
+ {
+ "name": "create Spock as admin with diff auth id",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "name": "Spock",
+ "auth_id": 1,
+ "email": "spock@gmail.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Uniqueness violation. duplicate key value violates unique constraint \"author_name_email_key\""
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create Jack",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "name": "Jack",
+ "auth_id": 1,
+ "email": "jack@gmail.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Foreign key violation. insert or update on table \"author\" violates foreign key constraint \"author_auth_id_fkey\""
+ },
+ "method": "POST"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/count.json b/server/testcases/count.json
new file mode 100644
index 00000000..ad96bb1d
--- /dev/null
+++ b/server/testcases/count.json
@@ -0,0 +1,61 @@
+{
+ "description": "check author table permissions",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create author B1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "B1",
+ "auth_id": 1,
+ "email": "google@gmail.com"
+ },
+ {
+ "name": "B2",
+ "auth_id": 1,
+ "email": "google@pmail.com"
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 2,
+ "returning": [
+ {
+ "id": 1
+ },
+ {
+ "id": 2
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create author B1 as admin",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "response": {
+ "count": 2
+ },
+ "status_code": 200,
+ "request": {
+ "kind": "count",
+ "body": {
+ "table": "author"
+ }
+ },
+ "method": "POST"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/create_array_relationship.json b/server/testcases/create_array_relationship.json
new file mode 100644
index 00000000..63cf437a
--- /dev/null
+++ b/server/testcases/create_array_relationship.json
@@ -0,0 +1,120 @@
+{
+ "description": "alter column type test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "b",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "c"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "b",
+ "references": {
+ "column": "id",
+ "table": "b"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create arr rel",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_array_relationship",
+ "body": {
+ "name": "a_rel",
+ "table": "b",
+ "using": {
+ "table": "a",
+ "column": "b"
+ }
+ }
+ }
+ },
+ {
+ "name": "create arr rel",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_array_relationship",
+ "body": {
+ "name": "a_rel",
+ "table": "b",
+ "using": {
+ "table": "a",
+ "column": "b"
+ }
+ }
+ }
+ },
+ {
+ "name": "add column",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "add_column",
+ "body" : {
+ "table": "b",
+ "column": {
+ "name": "a_rel",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/create_object_relationship.json b/server/testcases/create_object_relationship.json
new file mode 100644
index 00000000..52784474
--- /dev/null
+++ b/server/testcases/create_object_relationship.json
@@ -0,0 +1,98 @@
+{
+ "description": "alter column type test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "b",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "c"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "b",
+ "references": {
+ "column": "id",
+ "table": "b"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create obj rel",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_object_relationship",
+ "body": {
+ "name": "b_rel",
+ "table": "a",
+ "using": "b"
+ }
+ }
+ },
+ {
+ "name": "add column",
+ "url": "/api/1/query",
+ "user_id": "1",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "add_column",
+ "body" : {
+ "table": "a",
+ "column": {
+ "name": "b_rel",
+ "type": "integer"
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/create_table.json b/server/testcases/create_table.json
new file mode 100644
index 00000000..13ee367a
--- /dev/null
+++ b/server/testcases/create_table.json
@@ -0,0 +1,83 @@
+{
+ "description": "schema test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "author",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "varchar",
+ "name": "email"
+ },
+ {
+ "type": "varchar",
+ "name": "name"
+ },
+ {
+ "type": "integer",
+ "name": "auth_id"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "article",
+ "columns": [{
+ "type": "serial",
+ "name": "id"
+ }]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "article",
+ "columns": [{
+ "type": "serial",
+ "name": "id"
+ }]
+ }
+ }
+ }
+ ]
+}
diff --git a/server/testcases/defaults.json b/server/testcases/defaults.json
new file mode 100644
index 00000000..ef071e9e
--- /dev/null
+++ b/server/testcases/defaults.json
@@ -0,0 +1,163 @@
+{
+ "description": "defaults test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "SELECT 1 WHERE EXISTS(SELECT * from information_schema.columns where column_name='ts_sql' AND column_default='now()')"
+ ],
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "timetz",
+ "name": "ts_sql",
+ "default": {
+ "__type": "expression",
+ "expression": "now()"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$.columns",
+ "error": "Expecting ISO8601 formatted time"
+ },
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "b",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "timetz",
+ "name": "ts",
+ "default": {
+ "__type": "value",
+ "value": "now()"
+ }
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$.columns",
+ "error": "expected Int32, encountered String"
+ },
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "c",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "ts",
+ "default": "5"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "SELECT 1 WHERE EXISTS(SELECT * from information_schema.columns where column_name='ts' AND column_default :: integer=5)"
+ ],
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "__type": "create_table",
+ "name": "c",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "ts",
+ "default": 5
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "SELECT 1 WHERE EXISTS(SELECT * from information_schema.columns where column_name='kk' AND column_default :: integer=100)"
+ ],
+ "request": {
+ "kind": "add_column",
+ "body": {
+ "table": "c",
+ "column": {
+ "type": "integer",
+ "default": 100,
+ "name": "kk"
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_column.json b/server/testcases/drop_column.json
new file mode 100644
index 00000000..081c2961
--- /dev/null
+++ b/server/testcases/drop_column.json
@@ -0,0 +1,24 @@
+{
+ "description": "drop column test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_column",
+ "body":{
+ "table": "article",
+ "column": "author_id",
+ "cascade": true
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_column_manual_delete.json b/server/testcases/drop_column_manual_delete.json
new file mode 100644
index 00000000..e1872fb6
--- /dev/null
+++ b/server/testcases/drop_column_manual_delete.json
@@ -0,0 +1,206 @@
+{
+ "description": "drop column test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "drop col",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"path":"$","error":"can not delete due to the following dependent objects - [(\"article\",foreign_key_constraint,\"article_author_id_fkey\"),(\"article\",relationship,\"author\"),(\"author\",relationship,\"articles\"),(\"article\",permission,\"user (insert)\"),(\"article\",permission,\"user (select)\"),(\"article\",permission,\"user (update)\"),(\"article\",permission,\"user (delete)\")]"},
+ "request": {
+ "kind": "drop_column",
+ "body": {
+ "table": "article",
+ "column": "author_id",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop perm",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_insert_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "drop perm",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_select_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "drop perm",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_update_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "drop perm",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_delete_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "drop col",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",foreign_key_constraint,\"article_author_id_fkey\"),(\"article\",relationship,\"author\"),(\"author\",relationship,\"articles\")]"
+ },
+ "request": {
+ "kind": "drop_column",
+ "body": {
+ "table": "article",
+ "column": "author_id",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop article.author",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author"
+ }
+ }
+ },
+ {
+ "name": "drop col",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",foreign_key_constraint,\"article_author_id_fkey\"),(\"author\",relationship,\"articles\")]"
+ },
+ "request": {
+ "kind": "drop_column",
+ "body": {
+ "table": "article",
+ "column": "author_id",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop author.articles",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "author",
+ "relationship": "articles"
+ }
+ }
+ },
+ {
+ "name": "drop col",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",foreign_key_constraint,\"article_author_id_fkey\")]"
+ },
+ "request": {
+ "kind": "drop_column",
+ "body": {
+ "table": "article",
+ "column": "author_id",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop article_author_id_fkey",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_constraint",
+ "body": {
+ "table": "article",
+ "constraint": "article_author_id_fkey"
+ }
+ }
+ },
+ {
+ "name": "drop col",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_column",
+ "body": {
+ "table": "article",
+ "column": "author_id",
+ "cascade": false
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_constraint.json b/server/testcases/drop_constraint.json
new file mode 100644
index 00000000..6fb1314d
--- /dev/null
+++ b/server/testcases/drop_constraint.json
@@ -0,0 +1,24 @@
+{
+ "description": "drop constraint test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_constraint",
+ "body": {
+ "table": "article",
+ "constraint": "article_author_id_fkey",
+ "cascade": true
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_fkey_cons_manual.json b/server/testcases/drop_fkey_cons_manual.json
new file mode 100644
index 00000000..7a3903da
--- /dev/null
+++ b/server/testcases/drop_fkey_cons_manual.json
@@ -0,0 +1,113 @@
+{
+ "description": "drop fkey constraint test - manual",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_insert_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_select_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_delete_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_update_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "author",
+ "relationship": "articles"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_constraint",
+ "body": {
+ "table": "article",
+ "constraint": "article_author_id_fkey"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_pkey.json b/server/testcases/drop_pkey.json
new file mode 100644
index 00000000..9af365a2
--- /dev/null
+++ b/server/testcases/drop_pkey.json
@@ -0,0 +1,44 @@
+{
+ "description": "drop primary_key",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "drop primary_key",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_constraint",
+ "body": {
+ "table": "a",
+ "constraint": "a_pkey"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_rel.json b/server/testcases/drop_rel.json
new file mode 100644
index 00000000..c55bdbde
--- /dev/null
+++ b/server/testcases/drop_rel.json
@@ -0,0 +1,25 @@
+{
+ "description": "drop rel test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "__type": "drop_relationship",
+ "table": "article",
+ "relationship": "author",
+ "cascade": true
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_rel_manual.json b/server/testcases/drop_rel_manual.json
new file mode 100644
index 00000000..a2ea7e25
--- /dev/null
+++ b/server/testcases/drop_rel_manual.json
@@ -0,0 +1,165 @@
+{
+ "description": "drop rel manual test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",permission,\"user (delete)\"),(\"article\",permission,\"user (insert)\"),(\"article\",permission,\"user (select)\"),(\"article\",permission,\"user (update)\")]"
+ },
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_select_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",permission,\"user (delete)\"),(\"article\",permission,\"user (insert)\"),(\"article\",permission,\"user (update)\")]"
+ },
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author",
+ "cascade": false
+ }
+ }
+ },
+ {
+      "name": "drop insert permission on article for user",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_insert_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+      "name": "drop author relationship, no cascade (blocked by update/delete permissions)",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",permission,\"user (delete)\"),(\"article\",permission,\"user (update)\")]"
+ },
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author",
+ "cascade": false
+ }
+ }
+ },
+ {
+      "name": "drop update permission on article for user",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_update_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+      "name": "drop author relationship, no cascade (blocked by delete permission)",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"article\",permission,\"user (delete)\")]"
+ },
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author",
+ "cascade": false
+ }
+ }
+ },
+ {
+      "name": "drop delete permission on article for user",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_delete_permission",
+ "body": {
+ "table": "article",
+ "role": "user"
+ }
+ }
+ },
+ {
+      "name": "drop author relationship, no cascade (succeeds once permissions are gone)",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "table": "article",
+ "relationship": "author",
+ "cascade": false
+ }
+ }
+ }
+
+ ]
+}
\ No newline at end of file
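
drop_rel_manual.json walks the dependency chain by hand: each drop_relationship without cascade is rejected while any user permission on article still depends on the author relationship, and the reported dependent list shrinks as the permissions are dropped one by one. A sketch of the loop a runner could use for such a file, reusing the hypothetical `post` helper above; the real runner also resolves "depends", runs "pre_sql" and verifies "sql_check":

    import json
    import urllib.error

    def run_items(case):
        # Sketch only: execute items in order, treating expected 4xx
        # responses as data rather than failures.
        for item in case["items"]:
            try:
                status, body = post(item["url"], item["request"],
                                    role=item["role"],
                                    user_id=item["user_id"])
            except urllib.error.HTTPError as e:
                status, body = e.code, json.loads(e.read())
            assert status == item["status_code"], (item["name"], status)
            if "response" in item:
                assert body == item["response"], item["name"]

    with open("server/testcases/drop_rel_manual.json") as f:
        run_items(json.load(f))
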
diff --git a/server/testcases/drop_table.json b/server/testcases/drop_table.json
new file mode 100644
index 00000000..bafe800c
--- /dev/null
+++ b/server/testcases/drop_table.json
@@ -0,0 +1,27 @@
+{
+ "description": "drop test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "drop article no cascade",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"author\",relationship,\"articles\"),(\"category\",relationship,\"articles\")]"
+ },
+ "request": {
+ "kind": "drop_table",
+ "body": {
+ "table": "article",
+ "cascade": false
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_table_manual_delete.json b/server/testcases/drop_table_manual_delete.json
new file mode 100644
index 00000000..a7f5cbfc
--- /dev/null
+++ b/server/testcases/drop_table_manual_delete.json
@@ -0,0 +1,96 @@
+{
+  "description": "drop table after manually dropping dependent relationships",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "drop article no cascade",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"author\",relationship,\"articles\"),(\"category\",relationship,\"articles\")]"
+ },
+ "request": {
+ "kind": "drop_table",
+ "body": {
+ "__type": "drop_table",
+ "table": "article",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop articles rel on author",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "__type": "drop_relationship",
+ "table": "author",
+ "relationship": "articles"
+ }
+ }
+ },
+ {
+ "name": "drop article no cascade",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "can not delete due to the following dependent objects - [(\"category\",relationship,\"articles\")]"
+ },
+ "request": {
+ "kind": "drop_table",
+ "body": {
+ "__type": "drop_table",
+ "table": "article",
+ "cascade": false
+ }
+ }
+ },
+ {
+ "name": "drop articles rel on category",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_relationship",
+ "body": {
+ "__type": "drop_relationship",
+ "table": "category",
+ "relationship": "articles"
+ }
+ }
+ },
+ {
+ "name": "drop article no cascade",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "drop_table",
+ "body": {
+ "__type": "drop_table",
+ "table": "article",
+ "cascade": false
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/drop_view.json b/server/testcases/drop_view.json
new file mode 100644
index 00000000..1c6d7118
--- /dev/null
+++ b/server/testcases/drop_view.json
@@ -0,0 +1,40 @@
+{
+ "description": "drop view",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "pre_sql": [
+ "CREATE TABLE test(id serial)",
+ "CREATE VIEW test_view AS (SELECT id FROM test WHERE id > 10)"
+ ],
+ "request": {
+ "kind": "add_existing_view",
+ "body": {
+ "view": "test_view"
+ }
+ }
+ },
+ {
+ "name": "drop view",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+
+ "request": {
+ "kind": "drop_view",
+ "body": {
+ "view": "test_view",
+ "cascade": true
+ }
+ }
+ }
+ ]
+}
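
The "pre_sql" statements here are raw SQL executed against the database before the item's request is sent, which is how the view being tracked comes into existence; "sql_check" entries in other files can be handled the same way, asserting the statement returns at least one row. A sketch, assuming psycopg2 and a reachable DSN (both assumptions, not suite requirements):

    import psycopg2  # assumed driver; any PostgreSQL client works

    def run_sql(statements, dsn="postgresql://localhost/test"):
        # Execute each statement verbatim, as a runner would for "pre_sql".
        with psycopg2.connect(dsn) as conn:
            with conn.cursor() as cur:
                for stmt in statements:
                    cur.execute(stmt)

    run_sql(["CREATE TABLE test(id serial)",
             "CREATE VIEW test_view AS (SELECT id FROM test WHERE id > 10)"])
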
diff --git a/server/testcases/errors.json b/server/testcases/errors.json
new file mode 100644
index 00000000..208b7cd9
--- /dev/null
+++ b/server/testcases/errors.json
@@ -0,0 +1,218 @@
+{
+ "description": "error messages test",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create author 1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "Balaji",
+ "auth_id": 1,
+ "email": "google@balaji.com",
+ "id": 1
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "method": "POST"
+ },
+ {
+      "name": "create duplicate author 1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "name": "Balaji",
+ "auth_id": 1,
+ "email": "google@balaji.com",
+ "id": 1
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Uniqueness violation. duplicate key value violates unique constraint \"author_pkey\""
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create article with missing author_id",
+ "url": "/api/1/table/article/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "title": "LISP",
+ "rating": 1,
+ "category_id": 2,
+ "content": "Nothing here",
+ "is_published": false
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Not-NULL violation. null value in column \"author_id\" violates not-null constraint"
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create article with invalid author_id",
+ "url": "/api/1/table/article/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "objects": [
+ {
+ "title": "LISP",
+ "rating": 1,
+ "category_id": 2,
+ "content": "Nothing here",
+ "is_published": false,
+ "author_id": 2
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Foreign key violation. insert or update on table \"article\" violates foreign key constraint \"article_author_id_fkey\""
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create category",
+ "url": "/api/1/table/category/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "name": "Timepass",
+ "description": "Timepass stuff",
+ "id": 1
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create valid article",
+ "url": "/api/1/table/article/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [
+ {
+ "title": "LISP",
+ "rating": 1,
+ "category_id": 1,
+ "content": "Nothing here",
+ "is_published": false,
+ "author_id": 1,
+ "id": 1
+ }
+ ],
+ "returning": [
+ "id"
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "delete the author",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "response": {
+ "path": "$",
+ "error": "Foreign key violation. update or delete on table \"author\" violates foreign key constraint \"article_author_id_fkey\" on table \"article\""
+ },
+ "status_code": 400,
+ "request": {
+ "kind": "delete",
+ "body": {
+ "table": "author",
+ "where": {
+ "id": 1
+ }
+ }
+ },
+ "method": "POST"
+ },
+ {
+ "name": "update the author",
+ "url": "/api/1/table/author/update",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "where": {
+ "id": 1
+ },
+ "$set": {
+ "id": 20
+ },
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Foreign key violation. update or delete on table \"author\" violates foreign key constraint \"article_author_id_fkey\" on table \"article\""
+ },
+ "method": "POST"
+ },
+ {
+ "name": "update the article",
+ "url": "/api/1/table/article/update",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "request": {
+ "where": {
+ "id": 1
+ },
+ "$set": {
+ "author_id": 2
+ },
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "path": "$",
+ "error": "Foreign key violation. insert or update on table \"article\" violates foreign key constraint \"article_author_id_fkey\""
+ },
+ "method": "POST"
+ }
+ ]
+}
\ No newline at end of file
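
These items pin down the exact PostgreSQL constraint-violation messages (unique, not-null, foreign key) that the API is expected to surface as a 400. Replaying the duplicate insert, for instance, with the hypothetical `post` helper from the earlier sketch:

    import json
    import urllib.error

    author = {"name": "Balaji", "auth_id": 1,
              "email": "google@balaji.com", "id": 1}
    payload = {"objects": [author], "returning": ["id"]}

    post("/api/1/table/author/insert", payload)       # first insert: 200
    try:
        post("/api/1/table/author/insert", payload)   # duplicate: 400
    except urllib.error.HTTPError as e:
        assert e.code == 400
        assert "author_pkey" in json.loads(e.read())["error"]
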
diff --git a/server/testcases/indexes.json b/server/testcases/indexes.json
new file mode 100644
index 00000000..ca500ca2
--- /dev/null
+++ b/server/testcases/indexes.json
@@ -0,0 +1,204 @@
+{
+ "description": "indexes test",
+ "depends": [],
+ "items": [
+ {
+      "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "a"
+ },
+ {
+ "type": "integer",
+ "name": "b"
+ },
+ {
+ "type": "integer",
+ "name": "c"
+ },
+ {
+ "type": "varchar",
+ "name": "d"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE INDEX a_a_b_c_d_idx ON a USING btree (a, b, c NULLS FIRST, d DESC)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "table": "a",
+ "columns": [
+ "a",
+ "+b",
+ {
+ "column": "c",
+ "nulls": "first"
+ },
+ {
+ "column": "d",
+ "nulls": "first",
+ "order": "desc"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols + using",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE INDEX a_a_idx ON a USING btree (a)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "table": "a",
+ "using": "btree",
+ "columns": [
+ "a"
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols + using + with",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE INDEX a_b_idx ON a USING btree (b) WITH (fillfactor=50)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "table": "a",
+ "using": "btree",
+ "with": {
+ "fillfactor": 50
+ },
+ "columns": [
+ "b"
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols + name",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE INDEX myindex ON a USING btree (c)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "name": "myindex",
+ "table": "a",
+ "columns": [
+ "c"
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols + unique",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE UNIQUE INDEX a_b_idx1 ON a USING btree (b)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "unique": true,
+ "table": "a",
+ "columns": [
+ "b"
+ ]
+ }
+ }
+ },
+ {
+ "name": "create index only cols + where",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from pg_catalog.pg_indexes where indexdef = 'CREATE INDEX a_b_idx2 ON a USING btree (b) WHERE (((id > 1000) AND true) AND true)'"
+ ],
+ "request": {
+ "kind": "create_index",
+ "body": {
+ "table": "a",
+ "where": {
+ "id": {
+ "$gt": 1000
+ }
+ },
+ "columns": [
+ "b"
+ ]
+ }
+ }
+ },
+ {
+      "name": "drop index myindex",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select 1 where not exists (select * from pg_catalog.pg_indexes where indexname = 'myindex')"
+ ],
+ "request": {
+ "kind": "drop_index",
+ "body": {
+ "index": "myindex"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
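
The create_index items accept bare column names (an optional "+" prefix meaning ascending) or objects carrying "order" and "nulls" options; the expected indexdef strings in "sql_check" imply a column-to-DDL translation along these lines (a rough reconstruction from the fixtures, not the server's actual code):

    def index_col_sql(col):
        # Map one create_index column spec to its DDL fragment, as implied
        # by the indexdef strings checked above.
        if isinstance(col, str):
            return col.lstrip("+")   # "+b" is just ascending "b"
        frag = col["column"]
        if col.get("order") == "desc":
            frag += " DESC"          # DESC already implies NULLS FIRST
        elif col.get("nulls") == "first":
            frag += " NULLS FIRST"
        return frag

    cols = ["a", "+b",
            {"column": "c", "nulls": "first"},
            {"column": "d", "nulls": "first", "order": "desc"}]
    assert ", ".join(map(index_col_sql, cols)) == "a, b, c NULLS FIRST, d DESC"
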
diff --git a/server/testcases/json_jsonb.json b/server/testcases/json_jsonb.json
new file mode 100644
index 00000000..6d906949
--- /dev/null
+++ b/server/testcases/json_jsonb.json
@@ -0,0 +1,113 @@
+{
+ "description": "json and jsonb column type test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "sql_check": [
+ "select * from information_schema.columns where data_type = 'jsonb' and table_name = 'a' AND table_schema='public'",
+ "select * from information_schema.columns where data_type = 'json' and table_name = 'a' AND table_schema = 'public'"
+ ],
+ "request": {
+ "kind": "create_table",
+ "body":
+ {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "json",
+ "name": "a"
+ },
+ {
+ "type": "jsonb",
+ "name": "b"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "set permission with filter on json column",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"path":"$.perm.filter","error":"JSON column can not be part of where clause"},
+ "request": {
+ "kind": "create_select_permission",
+ "body":
+ {
+ "role": "user",
+ "perm": {
+ "columns": ["id"],
+ "filter": {
+ "a": {"x" : 1}
+ }
+ },
+ "table": "a"
+ }
+
+ }
+ },
+ {
+ "name": "insert object",
+ "url": "/api/1/table/a/insert",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "objects": [{
+ "a": {"a" : "b", "c": "d"},
+ "b": {"a" : "b", "c": "d"}
+ }],
+ "returning": [
+ "id", "a"
+ ]
+ }
+ },
+ {
+ "name": "select with 'where' on JSONB column",
+ "url": "/api/1/table/a/select",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"path":"$.where","error":"JSONB column can not be part of where clause"},
+ "request": {
+ "columns": ["id"],
+ "where" : {
+ "b": { "$eq": {"a": "b", "c" : "d"}}
+ }
+ }
+ },
+ {
+ "name": "select with 'where' on JSON column",
+ "url": "/api/1/table/a/select",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"path":"$.where","error":"JSON column can not be part of where clause"},
+ "request": {
+ "columns": ["id"],
+ "where" : {
+ "a": { "$eq": {"a": "b", "c" : "d"}}
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
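
json and jsonb columns are writable and selectable here, but rejected anywhere they would end up in a WHERE clause, including permission filters. Checking the select rejection with the earlier hypothetical `post` helper:

    import json
    import urllib.error

    try:
        post("/api/1/table/a/select",
             {"columns": ["id"],
              "where": {"b": {"$eq": {"a": "b", "c": "d"}}}})
    except urllib.error.HTTPError as e:
        assert e.code == 400
        assert json.loads(e.read())["path"] == "$.where"
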
diff --git a/server/testcases/no_permission_category.json b/server/testcases/no_permission_category.json
new file mode 100644
index 00000000..e29b86ba
--- /dev/null
+++ b/server/testcases/no_permission_category.json
@@ -0,0 +1,173 @@
+{
+  "description": "permissions check without select permission on category",
+ "depends": [
+ "author_article_category.json"
+ ],
+ "items": [
+ {
+ "name": "declare select permissions on author",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "author",
+ "perm": {
+ "columns": [
+ "id",
+ "name",
+ "auth_id"
+ ],
+ "filter": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare update permissions on author",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_update_permission",
+ "body": {
+ "role": "user",
+ "table": "author",
+ "perm": {
+ "columns": [
+ "name"
+ ],
+ "filter": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare insert permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_insert_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "check": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare select permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "override": {
+ "author": {
+ "columns": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "columns": [
+ "id",
+ "title",
+ "content",
+ "is_published",
+ "rating",
+ "author_id"
+ ],
+ "filter": {
+ "$or": [
+ {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ },
+ {
+ "is_published": true
+ }
+ ]
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare update permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_update_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "columns": [
+ "title",
+ "content",
+ "is_published",
+ "rating"
+ ],
+ "filter": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare delete permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_delete_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "filter": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/not_working/alter_foreign_key.json b/server/testcases/not_working/alter_foreign_key.json
new file mode 100644
index 00000000..43db67c1
--- /dev/null
+++ b/server/testcases/not_working/alter_foreign_key.json
@@ -0,0 +1,166 @@
+{
+ "description": "alter foreign key",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "name": "a",
+ "primary_key": [
+ "x"
+ ],
+ "columns": [
+ {
+ "name": "x",
+ "type": "integer"
+ },
+ {
+ "name": "y",
+ "type": "integer"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "create_table",
+ "body": {
+ "name": "b",
+ "primary_key": [
+ "x"
+ ],
+ "columns": [
+ {
+ "name": "x",
+ "type": "integer",
+ "references": {
+ "column": "x",
+ "table": "a"
+ }
+ },
+ {
+ "name": "y",
+ "type": "integer"
+ }
+ ]
+ }
+ }
+ },
+ {
+ "name": "add unique constraint on table a",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "add_unique_constraint",
+ "body": {
+ "table": "a",
+ "constraint": {
+ "__type": "unique_constraint",
+ "columns": [
+ "x",
+ "y"
+ ]
+ }
+ }
+ }
+ },
+ {
+ "name": "alter foreign key constraint on table b",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "method": "POST",
+ "request": {
+ "kind": "alter_foreign_key_constraint",
+ "body": {
+ "table": "b",
+ "constraint": {
+ "name": "b_x_fkey",
+ "mapping": {
+ "x": "x",
+ "y": "y"
+ },
+ "ref_table": "a"
+ }
+ }
+ }
+ },
+ {
+      "name": "add duplicate foreign key constraint on table b",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "method": "POST",
+ "sql_check": [
+ "select 1 where exists (select sum(ordinal_position) from information_schema.key_column_usage where constraint_name = 'b_x_fkey' GROUP BY constraint_schema, constraint_name);"
+ ],
+ "request": {
+ "kind": "add_foreign_key_constraint",
+ "body": {
+ "table": "b",
+ "constraint": {
+ "name": "b_x_fkey",
+ "mapping": {
+ "x": "x",
+ "y": "y"
+ },
+ "ref_table": "a"
+ }
+ }
+ }
+ },
+ {
+ "name": "can't create relationship on b_x_fkey",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "method": "POST",
+ "request": {
+ "kind": "create_object_relationship",
+ "body": {
+ "using": "b_x_fkey",
+ "name": "xx",
+ "table": "b"
+ }
+ }
+ },
+ {
+      "name": "drop foreign key on non-existent table",
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 400,
+ "method": "POST",
+ "sql_check": [
+ "select 1 where exists (select sum(ordinal_position) from information_schema.key_column_usage where constraint_name = 'b_x_fkey' GROUP BY constraint_schema, constraint_name);"
+ ],
+ "request": {
+ "kind": "drop_constraint",
+ "body": {
+ "constraint": "b_x_fkey",
+ "table": "z"
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/permission.json b/server/testcases/permission.json
new file mode 100644
index 00000000..0601d000
--- /dev/null
+++ b/server/testcases/permission.json
@@ -0,0 +1,196 @@
+{
+ "description": "basic permissions check",
+ "depends": [
+ "author_article_category.json"
+ ],
+ "items": [
+ {
+ "name": "declare select permissions on category",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "category",
+ "perm": {
+ "columns": [
+ "id",
+ "name",
+ "description"
+ ],
+ "filter": {}
+ }
+ }
+ }
+ },
+ {
+ "name": "declare select permissions on author",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "author",
+ "perm": {
+ "columns": [
+ "id",
+ "name",
+ "auth_id"
+ ],
+ "filter": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare update permissions on author",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_update_permission",
+ "body": {
+ "role": "user",
+ "table": "author",
+ "perm": {
+ "columns": [
+ "name"
+ ],
+ "filter": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare insert permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_insert_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "check": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare select permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "override": {
+ "author": {
+ "columns": [
+ "id",
+ "name"
+ ]
+ }
+ },
+ "columns": [
+ "id",
+ "title",
+ "content",
+ "is_published",
+ "rating",
+ "author_id"
+ ],
+ "filter": {
+ "$or": [
+ {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ },
+ {
+ "is_published": true
+ }
+ ]
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare update permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_update_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "columns": [
+ "title",
+ "content",
+ "is_published",
+ "rating"
+ ],
+ "filter": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "name": "declare delete permissions on article",
+ "status_code": 200,
+ "url": "/api/1/query",
+ "role": "admin",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_delete_permission",
+ "body": {
+ "role": "user",
+ "table": "article",
+ "perm": {
+ "filter": {
+ "author": {
+ "auth_id": "REQ_USER_ID"
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
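
The REQ_USER_ID string in these filters is a placeholder for the id of the requesting user, so the author select permission, for example, only exposes rows whose auth_id matches the caller. Informally, the substitution works like this (a sketch, not the server's evaluator):

    def resolve(filter_obj, user_id):
        # Recursively replace the REQ_USER_ID placeholder with the
        # concrete id of the requesting user.
        if filter_obj == "REQ_USER_ID":
            return user_id
        if isinstance(filter_obj, dict):
            return {k: resolve(v, user_id) for k, v in filter_obj.items()}
        if isinstance(filter_obj, list):
            return [resolve(v, user_id) for v in filter_obj]
        return filter_obj

    assert resolve({"auth_id": "REQ_USER_ID"}, "1") == {"auth_id": "1"}
    assert resolve({"$or": [{"author": {"auth_id": "REQ_USER_ID"}},
                            {"is_published": True}]}, "7") == \
        {"$or": [{"author": {"auth_id": "7"}}, {"is_published": True}]}
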
diff --git a/server/testcases/update_ops.json b/server/testcases/update_ops.json
new file mode 100644
index 00000000..37536bc7
--- /dev/null
+++ b/server/testcases/update_ops.json
@@ -0,0 +1,118 @@
+{
+  "description": "update operators test",
+ "depends": [],
+ "items": [
+ {
+ "name": "create_schema",
+ "url": "/api/1/query",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "kind": "create_table",
+ "body":
+ {
+ "primary_key": [
+ "id"
+ ],
+ "name": "a",
+ "columns": [
+ {
+ "type": "serial",
+ "name": "id"
+ },
+ {
+ "type": "integer",
+ "name": "a"
+ },
+ {
+ "type": "timetz",
+ "name": "b",
+ "default": "03:21:55+00:00"
+ },
+ {
+ "type": "varchar",
+ "name": "c"
+ }
+ ]
+ }
+
+ }
+ },
+ {
+ "name": "create entry",
+ "url": "/api/1/table/a/insert",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "request": {
+ "objects": [
+ {
+ "a": 1,
+ "b": "04:21:55+00:00",
+ "c": "World"
+ }
+ ]
+ }
+ },
+ {
+ "name": "inc and default update",
+ "url": "/api/1/table/a/update",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"returning":[{"a":21,"b":"03:21:55+00"}],"affected_rows":1},
+ "request": {
+ "where": {},
+ "$set": {"c": "Hello"},
+ "$inc": {
+ "a" : 20
+ },
+ "$default": ["b"],
+ "returning": ["a", "b"]
+
+ }
+ },
+ {
+ "name": "mul update",
+ "url": "/api/1/table/a/update",
+ "role": "admin",
+ "status_code": 200,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"returning":[{"a":420,"b":"03:21:55+00"}],"affected_rows":1},
+ "request": {
+ "where": {},
+ "$mul": {
+ "a" : 20
+ },
+ "returning": ["a", "b"]
+
+ }
+ },
+ {
+      "name": "multiple column appearances",
+ "url": "/api/1/table/a/update",
+ "role": "admin",
+ "status_code": 400,
+ "method": "POST",
+ "user_id": "1",
+ "response": {"path":"$","error":"syntax error. \"multiple assignments to same column \\\"a\\\"\""},
+ "request": {
+ "where": {},
+ "$inc": {
+ "a" : 20
+ },
+ "$mul": {
+ "a" : 20
+ },
+ "returning": ["a", "b"]
+
+ }
+ }
+
+ ]
+}
\ No newline at end of file
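
The expected "returning" values follow directly from applying the update operators to the inserted row (a = 1):

    a = 1
    a += 20           # "$inc": {"a": 20}
    assert a == 21    # first response
    a *= 20           # "$mul": {"a": 20}
    assert a == 420   # second response
    # "$default": ["b"] resets b to its column default, hence
    # "03:21:55+00" in both responses; combining $inc and $mul on the
    # same column in one request is rejected as multiple assignments.
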
diff --git a/server/testcases/user_permission_non_admin.json b/server/testcases/user_permission_non_admin.json
new file mode 100644
index 00000000..6f887f54
--- /dev/null
+++ b/server/testcases/user_permission_non_admin.json
@@ -0,0 +1,31 @@
+{
+  "description": "non-admin role cannot create permissions",
+ "depends": [
+ "author_article_category.json"
+ ],
+ "items": [
+ {
+ "name": "declare select permissions on category",
+ "status_code": 400,
+ "url": "/api/1/query",
+ "role": "user",
+ "user_id": "1",
+ "method": "POST",
+ "request": {
+ "kind": "create_select_permission",
+ "body": {
+ "role": "user",
+ "table": "category",
+ "perm": {
+ "columns": [
+ "id",
+ "name",
+ "description"
+ ],
+ "filter": {}
+ }
+ }
+ }
+ }
+ ]
+}
\ No newline at end of file
diff --git a/server/testcases/wildcard.json b/server/testcases/wildcard.json
new file mode 100644
index 00000000..e9afe524
--- /dev/null
+++ b/server/testcases/wildcard.json
@@ -0,0 +1,331 @@
+{
+ "description": "check wildcards",
+ "depends": [
+ "permission.json"
+ ],
+ "items": [
+ {
+ "name": "create author B1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "name": "James Bond",
+ "auth_id": 1,
+ "email": "google@gmail.com"
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create category",
+ "url": "/api/1/table/category/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "name": "Haskell",
+ "description": "Haskell Haskell Haskell"
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create article under B1",
+ "url": "/api/1/table/article/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "title": "Dependent Haskell",
+ "author_id": 1,
+        "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "category_id": 1
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+      "name": "select * as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*"
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "email": "google@gmail.com"
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.* as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*"
+ ]
+ },
+ "response": [
+ {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "articles": [
+ {
+ "rating": 3,
+ "author_id": 1,
+ "category_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "id": 1,
+ "title": "Dependent Haskell"
+ }
+ ],
+ "id": 1
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.* as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*",
+ "name"
+ ]
+ },
+ "response": [
+ {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "articles": [
+ {
+ "rating": 3,
+ "author_id": 1,
+ "category_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "id": 1,
+ "title": "Dependent Haskell"
+ }
+ ],
+ "id": 1
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.* as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*",
+ {
+ "name": "articles",
+ "columns": [
+ "title"
+ ]
+ }
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "email": "google@gmail.com",
+ "articles": [
+ {
+ "title": "Dependent Haskell"
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.*.* as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*.*"
+ ]
+ },
+ "response": [
+ {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "articles": [
+ {
+ "rating": 3,
+ "author_id": 1,
+ "category_id": 1,
+ "category": {
+ "name": "Haskell",
+ "id": 1,
+ "description": "Haskell Haskell Haskell"
+ },
+            "content": "Hello world",
+ "is_published": false,
+ "author": {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "id": 1
+ },
+ "id": 1,
+ "title": "Dependent Haskell"
+ }
+ ],
+ "id": 1
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.*.* as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*.*",
+ "*.*"
+ ]
+ },
+ "response": [
+ {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "articles": [
+ {
+ "rating": 3,
+ "author_id": 1,
+ "category_id": 1,
+ "category": {
+ "name": "Haskell",
+ "id": 1,
+ "description": "Haskell Haskell Haskell"
+ },
+            "content": "Hello world",
+ "is_published": false,
+ "author": {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "id": 1
+ },
+ "id": 1,
+ "title": "Dependent Haskell"
+ }
+ ],
+ "id": 1
+ }
+ ],
+ "method": "POST"
+ },
+ {
+ "name": "select * (articles *) as admin",
+ "url": "/api/1/table/author/select",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*",
+ {
+ "name": "articles",
+ "columns": [
+ "*"
+ ]
+ }
+ ]
+ },
+ "response": [
+ {
+ "email": "google@gmail.com",
+ "auth_id": 1,
+ "name": "James Bond",
+ "articles": [
+ {
+ "rating": 3,
+ "author_id": 1,
+ "category_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "id": 1,
+ "title": "Dependent Haskell"
+ }
+ ],
+ "id": 1
+ }
+ ],
+ "method": "POST"
+ }
+ ]
+}
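
The column specs exercised above read as follows: "*" expands to all (permitted) columns, each additional ".*" descends one relationship level, and an explicit relationship entry given alongside a wildcard overrides the expansion for that relationship. A small sanity check of that depth reading (an informal summary, not the server's parser):

    def wildcard_depth(spec):
        # "*" -> 1 (columns only), "*.*" -> 2 (plus direct relationships),
        # "*.*.*" -> 3, and so on; anything else is not a pure wildcard.
        parts = spec.split(".")
        return len(parts) if all(p == "*" for p in parts) else None

    assert wildcard_depth("*") == 1
    assert wildcard_depth("*.*") == 2
    assert wildcard_depth("*.*.*") == 3
    assert wildcard_depth("name") is None
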
diff --git a/server/testcases/wildcard_user.json b/server/testcases/wildcard_user.json
new file mode 100644
index 00000000..10aeb1e2
--- /dev/null
+++ b/server/testcases/wildcard_user.json
@@ -0,0 +1,286 @@
+{
+ "description": "check wildcards for user role",
+ "depends": [
+ "no_permission_category.json"
+ ],
+ "items": [
+ {
+ "name": "create author B1 as admin",
+ "url": "/api/1/table/author/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "name": "James Bond",
+ "auth_id": 1,
+ "email": "google@gmail.com"
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create category",
+ "url": "/api/1/table/category/insert",
+ "role": "admin",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "name": "Haskell",
+ "description": "Haskell Haskell Haskell"
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+ {
+ "name": "create article under B1",
+ "url": "/api/1/table/article/insert",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "objects": [{
+ "title": "Dependent Haskell",
+ "author_id": 1,
+        "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "category_id": 1
+ }],
+ "returning": [
+ "id"
+ ]
+ },
+ "response": {
+ "affected_rows": 1,
+ "returning": [
+ {
+ "id": 1
+ }
+ ]
+ },
+ "method": "POST"
+ },
+
+ {
+      "name": "select *.* as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*"
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell",
+ "author_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "id": 1
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.* as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*",
+ "name"
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell",
+ "author_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "id": 1
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.* as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*",
+ {
+ "name": "articles",
+ "columns": [
+ "title"
+ ]
+ }
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell"
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.*.* as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*.*"
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell",
+ "author_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "id": 1,
+ "author": {
+ "name": "James Bond",
+ "id": 1
+ }
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+      "name": "select *.*.* as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*.*.*",
+ "*.*"
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell",
+ "author_id": 1,
+            "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "id": 1,
+ "author": {
+ "name": "James Bond",
+ "id": 1
+ }
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ },
+ {
+ "name": "select * (articles *) as user",
+ "url": "/api/1/table/author/select",
+ "role": "user",
+ "user_id": "1",
+ "status_code": 200,
+ "request": {
+ "columns": [
+ "*",
+ {
+ "name": "articles",
+ "columns": [
+ "*"
+ ]
+ }
+ ]
+ },
+ "response": [
+ {
+ "name": "James Bond",
+ "id": 1,
+ "auth_id": 1,
+ "articles": [
+ {
+ "title": "Dependent Haskell",
+            "content": "Hello world",
+ "is_published": false,
+ "rating": 3,
+ "id": 1,
+ "author_id": 1
+ }
+ ]
+ }
+ ],
+ "method": "POST"
+ }
+ ]
+}