diff --git a/.github/workflows/haskell.yml b/.github/workflows/haskell.yml index af8b007f5..cd321bf40 100644 --- a/.github/workflows/haskell.yml +++ b/.github/workflows/haskell.yml @@ -45,13 +45,12 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - cabal: ["3.2"] + cabal: ["3.4"] ghc: - - "8.2.2" - "8.4.4" - "8.6.5" - "8.8.4" - - "8.10.1" + - "8.10.3" env: CONFIG: "--enable-tests" @@ -70,6 +69,7 @@ jobs: uses: supercharge/redis-github-action@1.1.0 - run: cabal v2-update - run: cabal v2-freeze $CONFIG + - run: cat cabal.project.freeze - uses: actions/cache@v2 with: path: | @@ -77,7 +77,7 @@ jobs: key: ${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} restore-keys: | ${{ runner.os }}-${{ matrix.ghc }}-${{ hashFiles('cabal.project.freeze') }} - # ${{ runner.os }}-${{ matrix.ghc }}- + ${{ runner.os }}-${{ matrix.ghc }}- - run: cabal v2-build all --disable-optimization --only-dependencies $CONFIG - run: cabal v2-build all --disable-optimization $CONFIG - run: cabal v2-test all --disable-optimization $CONFIG diff --git a/.gitignore b/.gitignore index dfdf38bbb..ae521ad58 100644 --- a/.gitignore +++ b/.gitignore @@ -51,3 +51,4 @@ persistent-test/db/ .hspec-failures stack.yaml.lock +*.yaml.lock diff --git a/cabal.project b/cabal.project index 99ddaa950..34b031566 100644 --- a/cabal.project +++ b/cabal.project @@ -2,7 +2,7 @@ packages: persistent persistent-sqlite persistent-test - persistent-mongoDB + -- persistent-mongoDB persistent-mysql persistent-postgresql persistent-redis diff --git a/persistent-mongoDB/Database/Persist/MongoDB.hs b/persistent-mongoDB/Database/Persist/MongoDB.hs index 25cb38b70..65705559a 100644 --- a/persistent-mongoDB/Database/Persist/MongoDB.hs +++ b/persistent-mongoDB/Database/Persist/MongoDB.hs @@ -112,6 +112,7 @@ module Database.Persist.MongoDB , module Database.Persist ) where +import qualified Data.List.NonEmpty as NEL import Control.Exception (throw, throwIO) import Control.Monad (liftM, (>=>), forM_, unless) 
import Control.Monad.IO.Class (liftIO) @@ -156,6 +157,7 @@ import Database.MongoDB.Query (Database) import Database.Persist import qualified Database.Persist.Sql as Sql +import Database.Persist.EntityDef.Internal (toEmbedEntityDef) instance HasPersistBackend DB.MongoContext where type BaseBackend DB.MongoContext = DB.MongoContext @@ -408,7 +410,7 @@ updateToMongoField (BackendUpdate up) = mongoUpdateToDoc up -- | convert a unique key into a MongoDB document toUniquesDoc :: forall record. (PersistEntity record) => Unique record -> [DB.Field] toUniquesDoc uniq = zipWith (DB.:=) - (map (unFieldNameDB . snd) $ persistUniqueToFieldNames uniq) + (map (unFieldNameDB . snd) $ NEL.toList $ persistUniqueToFieldNames uniq) (map DB.val (persistUniqueToValues uniq)) -- | convert a PersistEntity into document fields. @@ -416,31 +418,35 @@ toUniquesDoc uniq = zipWith (DB.:=) -- 'recordToDocument' includes nulls toInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => record -> DB.Document -toInsertDoc record = zipFilter (embeddedFields $ toEmbedEntityDef entDef) - (map toPersistValue $ toPersistFields record) +toInsertDoc record = + zipFilter + (embeddedFields $ toEmbedEntityDef entDef) + (map toPersistValue $ toPersistFields record) where entDef = entityDef $ Just record - zipFilter :: [EmbedFieldDef] -> [PersistValue] -> DB.Document - zipFilter [] _ = [] - zipFilter _ [] = [] - zipFilter (fd:efields) (pv:pvs) = - if isNull pv then recur else - (fieldToLabel fd DB.:= embeddedVal (emFieldEmbed fd) pv):recur - + zipFilter xs ys = + map (\(fd, pv) -> + fieldToLabel fd + DB.:= + embeddedVal pv + ) + $ filter (\(_, pv) -> not (isNull pv)) + $ zip xs ys where - recur = zipFilter efields pvs - isNull PersistNull = True isNull (PersistMap m) = null m isNull (PersistList l) = null l isNull _ = False -- make sure to removed nulls from embedded entities also - embeddedVal :: Maybe EmbedEntityDef -> PersistValue -> DB.Value - embeddedVal (Just emDef) 
(PersistMap m) = DB.Doc $ - zipFilter (embeddedFields emDef) $ map snd m - embeddedVal je@(Just _) (PersistList l) = DB.Array $ map (embeddedVal je) l - embeddedVal _ pv = DB.val pv + embeddedVal :: PersistValue -> DB.Value + embeddedVal (PersistMap m) = + DB.Doc $ fmap (\(k, v) -> k DB.:= DB.val v) $ m + -- zipFilter fields $ map snd m + embeddedVal (PersistList l) = + DB.Array $ map embeddedVal l + embeddedVal pv = + DB.val pv entityToInsertDoc :: forall record. (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => Entity record -> DB.Document @@ -448,13 +454,13 @@ entityToInsertDoc (Entity key record) = keyToMongoDoc key ++ toInsertDoc record collectionName :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => record -> Text -collectionName = unEntityNameDB . entityDB . entityDef . Just +collectionName = unEntityNameDB . getEntityDBName . entityDef . Just -- | convert a PersistEntity into document fields. -- unlike 'toInsertDoc', nulls are included. recordToDocument :: (PersistEntity record, PersistEntityBackend record ~ DB.MongoContext) => record -> DB.Document -recordToDocument record = zipToDoc (map fieldDB $ entityFields entity) (toPersistFields record) +recordToDocument record = zipToDoc (map fieldDB $ getEntityFields entity) (toPersistFields record) where entity = entityDef $ Just record @@ -646,7 +652,7 @@ keyToMongoDoc k = case entityPrimary $ entityDefFromKey k of Nothing -> zipToDoc [FieldNameDB id_] values Just pdef -> [id_ DB.=: zipToDoc (primaryNames pdef) values] where - primaryNames = map fieldDB . compositeFields + primaryNames = map fieldDB . NEL.toList . compositeFields values = keyToValues k entityDefFromKey :: PersistEntity record => Key record -> EntityDef @@ -658,7 +664,7 @@ collectionNameFromKey = collectionName . 
recordTypeFromKey projectionFromEntityDef :: EntityDef -> DB.Projector projectionFromEntityDef eDef = - map toField (entityFields eDef) + map toField (getEntityFields eDef) where toField :: FieldDef -> DB.Field toField fDef = (unFieldNameDB (fieldDB fDef)) DB.=: (1 :: Int) @@ -920,7 +926,7 @@ fromPersistValuesThrow :: (Trans.MonadIO m, PersistEntity record, PersistEntityB fromPersistValuesThrow entDef doc = case eitherFromPersistValues entDef doc of Left t -> Trans.liftIO . throwIO $ PersistMarshalError $ - unEntityNameHS (entityHaskell entDef) `mappend` ": " `mappend` t + unEntityNameHS (getEntityHaskellName entDef) `mappend` ": " `mappend` t Right entity -> return entity mapLeft :: (a -> c) -> Either a b -> Either c b @@ -949,10 +955,13 @@ eitherFromPersistValues entDef doc = case mKey of -- Persistent creates a Haskell record from a list of PersistValue -- But most importantly it puts all PersistValues in the proper order orderPersistValues :: EmbedEntityDef -> [(Text, PersistValue)] -> [(Text, PersistValue)] -orderPersistValues entDef castDoc = reorder +orderPersistValues entDef castDoc = + match castColumns castDoc [] where - castColumns = map nameAndEmbed (embeddedFields entDef) - nameAndEmbed fdef = (fieldToLabel fdef, emFieldEmbed fdef) + castColumns = + map nameAndEmbed (embeddedFields entDef) + nameAndEmbed fdef = + (fieldToLabel fdef, emFieldEmbed fdef) -- TODO: the below reasoning should be re-thought now that we are no longer inserting null: searching for a null column will look at every returned field before giving up -- Also, we are now doing the _id lookup at the start. 
@@ -970,44 +979,44 @@ orderPersistValues entDef castDoc = reorder -- * but once we found an item in the alist use a new alist without that item for future lookups -- * so for the last query there is only one item left -- - reorder :: [(Text, PersistValue)] - reorder = match castColumns castDoc [] + match :: [(Text, Maybe (Either a EntityNameHS) )] + -> [(Text, PersistValue)] + -> [(Text, PersistValue)] + -> [(Text, PersistValue)] + -- when there are no more Persistent castColumns we are done + -- + -- allow extra mongoDB fields that persistent does not know about + -- another application may use fields we don't care about + -- our own application may set extra fields with the raw driver + match [] _ values = values + match ((fieldName, medef) : columns) fields values = + let + ((_, pv) , unused) = + matchOne fields [] + in + match columns unused $ + values ++ [(fieldName, nestedOrder medef pv)] where - match :: [(Text, Maybe EmbedEntityDef)] - -> [(Text, PersistValue)] - -> [(Text, PersistValue)] - -> [(Text, PersistValue)] - -- when there are no more Persistent castColumns we are done - -- - -- allow extra mongoDB fields that persistent does not know about - -- another application may use fields we don't care about - -- our own application may set extra fields with the raw driver - match [] _ values = values - match (column:columns) fields values = - let (found, unused) = matchOne fields [] - in match columns unused $ values ++ - [(fst column, nestedOrder (snd column) (snd found))] - where - nestedOrder (Just em) (PersistMap m) = - PersistMap $ orderPersistValues em m - nestedOrder (Just em) (PersistList l) = - PersistList $ map (nestedOrder (Just em)) l - -- implied: nestedOrder Nothing found = found - nestedOrder _ found = found - - matchOne (field:fs) tried = - if fst column == fst field + nestedOrder (Just _) (PersistMap m) = + PersistMap m + nestedOrder (Just em) (PersistList l) = + PersistList $ map (nestedOrder (Just em)) l + nestedOrder _ found = + 
found + + matchOne (field:fs) tried = + if fieldName == fst field -- snd drops the name now that it has been used to make the match -- persistent will add the field name later then (field, tried ++ fs) else matchOne fs (field:tried) - -- if field is not found, assume it was a Nothing - -- - -- a Nothing could be stored as null, but that would take up space. - -- instead, we want to store no field at all: that takes less space. - -- Also, another ORM may be doing the same - -- Also, this adding a Maybe field means no migration required - matchOne [] tried = ((fst column, PersistNull), tried) + -- if field is not found, assume it was a Nothing + -- + -- a Nothing could be stored as null, but that would take up space. + -- instead, we want to store no field at all: that takes less space. + -- Also, another ORM may be doing the same + -- Also, this adding a Maybe field means no migration required + matchOne [] tried = ((fieldName, PersistNull), tried) assocListFromDoc :: DB.Document -> [(Text, PersistValue)] assocListFromDoc = Prelude.map (\f -> ( (DB.label f), cast (DB.value f) ) ) diff --git a/persistent-mongoDB/README.md b/persistent-mongoDB/README.md new file mode 100644 index 000000000..2e6c015c5 --- /dev/null +++ b/persistent-mongoDB/README.md @@ -0,0 +1,11 @@ +# persistent-mongoDB + +`persistent-mongoDB` is on hiatus. + +There's a lot of complexity around the `EmbedEntityDef` stuff that makes it +really annoying to use. + +A new version of `persistent` will make that easy to work with, and I'll fix it +up then. + +If you want MongoDB *now* then PRs are welcome. 
diff --git a/persistent-mongoDB/persistent-mongoDB.cabal b/persistent-mongoDB/persistent-mongoDB.cabal index 5ee632598..a4baac253 100644 --- a/persistent-mongoDB/persistent-mongoDB.cabal +++ b/persistent-mongoDB/persistent-mongoDB.cabal @@ -72,9 +72,6 @@ test-suite test , time , transformers , unliftio-core - if impl(ghc < 8) - build-depends: - semigroups default-language: Haskell2010 source-repository head diff --git a/persistent-mysql/ChangeLog.md b/persistent-mysql/ChangeLog.md index 06e348189..a36391400 100644 --- a/persistent-mysql/ChangeLog.md +++ b/persistent-mysql/ChangeLog.md @@ -1,5 +1,11 @@ # Changelog for persistent-mysql +## 2.13.0.0 (unreleased) + +* [#1225](https://github.com/yesodweb/persistent/pull/1225) + * Support `persistent-2.13` changes for SqlBackend being made internal. + * Remove the deprecated `SomeField` type and pattern. + ## 2.12.1.0 * Expose `openMySQLConn` for explicit reference to opened connection. [#1248](https://github.com/yesodweb/persistent/pull/1248) diff --git a/persistent-mysql/Database/Persist/MySQL.hs b/persistent-mysql/Database/Persist/MySQL.hs index 281fc586d..b0a4daca0 100644 --- a/persistent-mysql/Database/Persist/MySQL.hs +++ b/persistent-mysql/Database/Persist/MySQL.hs @@ -26,8 +26,6 @@ module Database.Persist.MySQL , insertOnDuplicateKeyUpdate , insertManyOnDuplicateKeyUpdate , HandleUpdateCollision - , pattern SomeField - , SomeField , copyField , copyUnlessNull , copyUnlessEmpty @@ -48,6 +46,7 @@ import Control.Monad.Trans.Except (ExceptT, runExceptT) import Control.Monad.Trans.Reader (ReaderT, runReaderT) import Control.Monad.Trans.Writer (runWriterT) +import qualified Data.List.NonEmpty as NEL import Data.Acquire (Acquire, mkAcquire, with) import Data.Aeson import Data.Aeson.Types (modifyFailure) @@ -73,6 +72,7 @@ import GHC.Stack import System.Environment (getEnvironment) import Database.Persist.Sql +import Database.Persist.SqlBackend import Database.Persist.Sql.Types.Internal (makeIsolationLevelStatement) 
import qualified Database.Persist.Sql.Util as Util @@ -87,40 +87,40 @@ import qualified Database.MySQL.Simple.Types as MySQL -- The pool is properly released after the action finishes using -- it. Note that you should not use the given 'ConnectionPool' -- outside the action since it may be already been released. -withMySQLPool :: (MonadLoggerIO m, MonadUnliftIO m) - => MySQL.ConnectInfo - -- ^ Connection information. - -> Int - -- ^ Number of connections to be kept open in the pool. - -> (Pool SqlBackend -> m a) - -- ^ Action to be executed that uses the connection pool. - -> m a +withMySQLPool + :: (MonadLoggerIO m, MonadUnliftIO m) + => MySQL.ConnectInfo + -- ^ Connection information. + -> Int + -- ^ Number of connections to be kept open in the pool. + -> (Pool SqlBackend -> m a) + -- ^ Action to be executed that uses the connection pool. + -> m a withMySQLPool ci = withSqlPool $ open' ci - -- | Create a MySQL connection pool. Note that it's your -- responsibility to properly close the connection pool when -- unneeded. Use 'withMySQLPool' for automatic resource control. -createMySQLPool :: (MonadUnliftIO m, MonadLoggerIO m) - => MySQL.ConnectInfo - -- ^ Connection information. - -> Int - -- ^ Number of connections to be kept open in the pool. - -> m (Pool SqlBackend) +createMySQLPool + :: (MonadUnliftIO m, MonadLoggerIO m) + => MySQL.ConnectInfo + -- ^ Connection information. + -> Int + -- ^ Number of connections to be kept open in the pool. + -> m (Pool SqlBackend) createMySQLPool ci = createSqlPool $ open' ci - -- | Same as 'withMySQLPool', but instead of opening a pool -- of connections, only one connection is opened. -withMySQLConn :: (MonadUnliftIO m, MonadLoggerIO m) - => MySQL.ConnectInfo - -- ^ Connection information. - -> (SqlBackend -> m a) - -- ^ Action to be executed that uses the connection. - -> m a +withMySQLConn + :: (MonadUnliftIO m, MonadLoggerIO m) + => MySQL.ConnectInfo + -- ^ Connection information. 
+ -> (SqlBackend -> m a) + -- ^ Action to be executed that uses the connection. + -> m a withMySQLConn = withSqlConn . open' - -- | Open a connection to MySQL server, initialize the 'SqlBackend' and return -- their tuple -- @@ -131,32 +131,30 @@ openMySQLConn ci logFunc = do MySQLBase.autocommit conn False -- disable autocommit! smap <- newIORef $ Map.empty let - backend = SqlBackend - { connPrepare = prepare' conn - , connStmtMap = smap - , connInsertSql = insertSql' - , connInsertManySql = Nothing - , connUpsertSql = Nothing - , connPutManySql = Just putManySql - , connClose = MySQL.close conn - , connMigrateSql = migrate' ci - , connBegin = \_ mIsolation -> do - forM_ mIsolation $ \iso -> MySQL.execute_ conn (makeIsolationLevelStatement iso) - MySQL.execute_ conn "start transaction" >> return () - , connCommit = const $ MySQL.commit conn - , connRollback = const $ MySQL.rollback conn - , connEscapeFieldName = T.pack . escapeF - , connEscapeTableName = T.pack . escapeE . entityDB - , connEscapeRawName = T.pack . escapeDBName . T.unpack - , connNoLimit = "LIMIT 18446744073709551615" - -- This noLimit is suggested by MySQL's own docs, see - -- - , connRDBMS = "mysql" - , connLimitOffset = decorateSQLWithLimitOffset "LIMIT 18446744073709551615" - , connLogFunc = logFunc - , connMaxParams = Nothing - , connRepsertManySql = Just repsertManySql - } + backend = + setConnPutManySql putManySql $ + setConnRepsertManySql repsertManySql $ + mkSqlBackend MkSqlBackendArgs + { connPrepare = prepare' conn + , connStmtMap = smap + , connInsertSql = insertSql' + , connClose = MySQL.close conn + , connMigrateSql = migrate' ci + , connBegin = \_ mIsolation -> do + forM_ mIsolation $ \iso -> MySQL.execute_ conn (makeIsolationLevelStatement iso) + MySQL.execute_ conn "start transaction" >> return () + , connCommit = const $ MySQL.commit conn + , connRollback = const $ MySQL.rollback conn + , connEscapeFieldName = T.pack . escapeF + , connEscapeTableName = T.pack . escapeE . 
getEntityDBName + , connEscapeRawName = T.pack . escapeDBName . T.unpack + , connNoLimit = "LIMIT 18446744073709551615" + -- This noLimit is suggested by MySQL's own docs, see + -- + , connRDBMS = "mysql" + , connLimitOffset = decorateSQLWithLimitOffset "LIMIT 18446744073709551615" + , connLogFunc = logFunc + } pure (conn, backend) @@ -164,7 +162,6 @@ openMySQLConn ci logFunc = do open' :: MySQL.ConnectInfo -> LogFunc -> IO SqlBackend open' ci logFunc = snd <$> openMySQLConn ci logFunc - -- | Prepare a query. We don't support prepared statements, but -- we'll do some client-side preprocessing here. prepare' :: MySQL.Connection -> Text -> IO Statement @@ -181,14 +178,16 @@ prepare' conn sql = do -- | SQL code to be executed when inserting an entity. insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult insertSql' ent vals = - case entityPrimary ent of - Just _ -> ISRManyKeys sql vals - Nothing -> ISRInsertGet sql "SELECT LAST_INSERT_ID()" + case getEntityId ent of + EntityIdNaturalKey _ -> + ISRManyKeys sql vals + EntityIdField _ -> + ISRInsertGet sql "SELECT LAST_INSERT_ID()" where (fieldNames, placeholders) = unzip (Util.mkInsertPlaceholders ent escapeFT) sql = T.concat [ "INSERT INTO " - , escapeET $ entityDB ent + , escapeET $ getEntityDBName ent , "(" , T.intercalate "," fieldNames , ") VALUES(" @@ -353,7 +352,7 @@ migrate' :: MySQL.ConnectInfo -> EntityDef -> IO (Either [Text] [(Bool, Text)]) migrate' connectInfo allDefs getter val = do - let name = entityDB val + let name = getEntityDBName val let (newcols, udefs, fdefs) = mysqlMkColumns allDefs val old <- getColumns connectInfo getter val newcols let udspair = map udToPair udefs @@ -374,7 +373,7 @@ migrate' connectInfo allDefs getter val = do let refTarget = addReference allDefs refConstraintName refTblName cname (crFieldCascade cRef) - guard $ cname /= fieldDB (entityId val) + guard $ Just cname /= fmap fieldDB (getEntityIdField val) return $ AlterColumn name refTarget @@ -448,35 +447,58 @@ migrate' 
connectInfo allDefs getter val = do addTable :: [Column] -> EntityDef -> AlterDB addTable cols entity = AddTable $ concat - -- Lower case e: see Database.Persist.Sql.Migration - [ "CREATe TABLE " - , escapeE name - , "(" - , idtxt - , if null nonIdCols then [] else "," - , intercalate "," $ map showColumn nonIdCols - , ")" - ] - where - nonIdCols = - filter (\c -> cName c /= fieldDB (entityId entity) ) cols - name = entityDB entity - idtxt = case entityPrimary entity of - Just pdef -> concat [" PRIMARY KEY (", intercalate "," $ map (escapeF . fieldDB) $ compositeFields pdef, ")"] - Nothing -> - let defText = defaultAttribute $ fieldAttrs $ entityId entity - sType = fieldSqlType $ entityId entity - autoIncrementText = case (sType, defText) of - (SqlInt64, Nothing) -> " AUTO_INCREMENT" - _ -> "" - maxlen = findMaxLenOfField (entityId entity) - in concat - [ escapeF $ fieldDB $ entityId entity - , " " <> showSqlType sType maxlen False - , " NOT NULL" - , autoIncrementText - , " PRIMARY KEY" - ] + -- Lower case e: see Database.Persist.Sql.Migration + [ "CREATe TABLE " + , escapeE name + , "(" + , idtxt + , if null nonIdCols then [] else "," + , intercalate "," $ map showColumn nonIdCols + , ")" + ] + where + nonIdCols = + filter (\c -> Just (cName c) /= fmap fieldDB (getEntityIdField entity) ) cols + name = + getEntityDBName entity + idtxt = + case getEntityId entity of + EntityIdNaturalKey pdef -> + concat + [ " PRIMARY KEY (" + , intercalate "," + $ map (escapeF . 
fieldDB) $ NEL.toList $ compositeFields pdef + , ")" + ] + EntityIdField idField -> + let + defText = + defaultAttribute $ fieldAttrs idField + sType = + fieldSqlType idField + autoIncrementText = + case (sType, defText) of + (SqlInt64, Nothing) -> " AUTO_INCREMENT" + _ -> "" + maxlen = + findMaxLenOfField idField + in + concat + [ escapeF $ fieldDB idField + , " " <> showSqlType sType maxlen False + , " NOT NULL" + , autoIncrementText + , " PRIMARY KEY" + , case defText of + Nothing -> + "" + Just def -> + concat + [ " DEFAULT (" + , T.unpack def + , ")" + ] + ] -- | Find out the type of a column. findTypeOfColumn :: [EntityDef] -> EntityNameDB -> FieldNameDB -> (FieldNameDB, FieldType) @@ -488,8 +510,8 @@ findTypeOfColumn allDefs name col = ) ((,) col) $ do - entDef <- find ((== name) . entityDB) allDefs - fieldDef <- find ((== col) . fieldDB) (entityFields entDef) + entDef <- find ((== name) . getEntityDBName) allDefs + fieldDef <- find ((== col) . fieldDB) (getEntityFieldsDatabase entDef) return (fieldType fieldDef) -- | Find out the maxlen of a column (default to 200) @@ -497,8 +519,8 @@ findMaxLenOfColumn :: [EntityDef] -> EntityNameDB -> FieldNameDB -> (FieldNameDB findMaxLenOfColumn allDefs name col = maybe (col, 200) ((,) col) $ do - entDef <- find ((== name) . entityDB) allDefs - fieldDef <- find ((== col) . fieldDB) (entityFields entDef) + entDef <- find ((== name) . getEntityDBName) allDefs + fieldDef <- find ((== col) . fieldDB) (getEntityFieldsDatabase entDef) findMaxLenOfField fieldDef -- | Find out the maxlen of a field @@ -532,8 +554,8 @@ addReference allDefs fkeyname reftable cname fc = ++ " (allDefs = " ++ show allDefs ++ ")" referencedColumns = fromMaybe errorMessage $ do - entDef <- find ((== reftable) . entityDB) allDefs - return $ map fieldDB $ entityKeyFields entDef + entDef <- find ((== reftable) . 
getEntityDBName) allDefs + return $ map fieldDB $ NEL.toList $ getEntityKeyFields entDef data AlterColumn = Change Column | Add' Column @@ -564,7 +586,7 @@ data AlterDB = AddTable String udToPair :: UniqueDef -> (ConstraintNameDB, [FieldNameDB]) -udToPair ud = (uniqueDBName ud, map snd $ uniqueFields ud) +udToPair ud = (uniqueDBName ud, map snd $ NEL.toList $ uniqueFields ud) ---------------------------------------------------------------------- @@ -621,15 +643,15 @@ getColumns connectInfo getter def cols = do Nothing -> rs (Just r) -> (unFieldNameDB $ cName c, r) : rs vals = [ PersistText $ pack $ MySQL.connectDatabase connectInfo - , PersistText $ unEntityNameDB $ entityDB def - -- , PersistText $ unDBName $ fieldDB $ entityId def + , PersistText $ unEntityNameDB $ getEntityDBName def + -- , PersistText $ unDBName $ fieldDB $ getEntityId def ] helperClmns = CL.mapM getIt .| CL.consume where getIt row = fmap (either Left (Right . Left)) . liftIO . - getColumn connectInfo getter (entityDB def) row $ ref + getColumn connectInfo getter (getEntityDBName def) row $ ref where ref = case row of (PersistText cname : _) -> (Map.lookup cname refMap) _ -> Nothing @@ -837,7 +859,7 @@ getAlters getAlters allDefs edef (c1, u1) (c2, u2) = (getAltersC c1 c2, getAltersU u1 u2) where - tblName = entityDB edef + tblName = getEntityDBName edef getAltersC [] old = concatMap dropColumn old getAltersC (new:news) old = let (alters, old') = findAlters edef allDefs new old @@ -900,8 +922,9 @@ findAlters edef allDefs col@(Column name isNull type_ def gen _defConstraintName refAdd = case (ref == ref', ref) of (False, Just ColumnReference {crTableName=tname, crConstraintName=cname, crFieldCascade = cfc }) - | tname /= entityDB edef - , unConstraintNameDB cname /= unFieldNameDB (fieldDB (entityId edef)) + | tname /= getEntityDBName edef + , Just idField <- getEntityIdField edef + , unConstraintNameDB cname /= unFieldNameDB (fieldDB idField) -> [addReference allDefs cname tname name cfc] _ -> 
[] @@ -1211,7 +1234,7 @@ mockMigrate :: MySQL.ConnectInfo -> EntityDef -> IO (Either [Text] [(Bool, Text)]) mockMigrate _connectInfo allDefs _getter val = do - let name = entityDB val + let name = getEntityDBName val let (newcols, udefs, fdefs) = mysqlMkColumns allDefs val let udspair = map udToPair udefs case () of @@ -1255,37 +1278,34 @@ mockMigrate _connectInfo allDefs _getter val = do -- the actual database isn't already present in the system. mockMigration :: Migration -> IO () mockMigration mig = do - smap <- newIORef $ Map.empty - let sqlbackend = SqlBackend { connPrepare = \_ -> do - return Statement - { stmtFinalize = return () - , stmtReset = return () - , stmtExecute = undefined - , stmtQuery = \_ -> return $ return () - }, - connInsertManySql = Nothing, - connInsertSql = undefined, - connStmtMap = smap, - connClose = undefined, - connMigrateSql = mockMigrate undefined, - connBegin = undefined, - connCommit = undefined, - connRollback = undefined, - connEscapeFieldName = T.pack . escapeDBName . T.unpack . unFieldNameDB, - connEscapeTableName = T.pack . escapeDBName . T.unpack . unEntityNameDB . entityDB, - connEscapeRawName = T.pack . escapeDBName . T.unpack, - connNoLimit = undefined, - connRDBMS = undefined, - connLimitOffset = undefined, - connLogFunc = undefined, - connUpsertSql = undefined, - connPutManySql = undefined, - connMaxParams = Nothing, - connRepsertManySql = Nothing - } - result = runReaderT . runWriterT . 
runWriterT $ mig - resp <- result sqlbackend - mapM_ T.putStrLn $ map snd $ snd resp + smap <- newIORef $ Map.empty + let sqlbackend = + mkSqlBackend MkSqlBackendArgs + { connPrepare = \_ -> do + return Statement + { stmtFinalize = return () + , stmtReset = return () + , stmtExecute = undefined + , stmtQuery = \_ -> return $ return () + } + , connInsertSql = undefined + , connStmtMap = smap + , connClose = undefined + , connMigrateSql = mockMigrate undefined + , connBegin = undefined + , connCommit = undefined + , connRollback = undefined + , connEscapeFieldName = T.pack . escapeDBName . T.unpack . unFieldNameDB + , connEscapeTableName = T.pack . escapeDBName . T.unpack . unEntityNameDB . getEntityDBName + , connEscapeRawName = T.pack . escapeDBName . T.unpack + , connNoLimit = undefined + , connRDBMS = undefined + , connLimitOffset = undefined + , connLogFunc = undefined + } + result = runReaderT . runWriterT . runWriterT $ mig + resp <- result sqlbackend + mapM_ T.putStrLn $ map snd $ snd resp -- | MySQL specific 'upsert_'. This will prevent multiple queries, when one will -- do. The record will be inserted into the database. In the event that the @@ -1310,21 +1330,10 @@ insertOnDuplicateKeyUpdate record = -- -- @since 2.8.0 data HandleUpdateCollision record where - -- | Copy the field directly from the record. - CopyField :: EntityField record typ -> HandleUpdateCollision record - -- | Only copy the field if it is not equal to the provided value. - CopyUnlessEq :: PersistField typ => EntityField record typ -> typ -> HandleUpdateCollision record - --- | An alias for 'HandleUpdateCollision'. The type previously was only --- used to copy a single value, but was expanded to be handle more complex --- queries. --- --- @since 2.6.2 -type SomeField = HandleUpdateCollision - -pattern SomeField :: EntityField record typ -> SomeField record -pattern SomeField x = CopyField x -{-# DEPRECATED SomeField "The type SomeField is deprecated. 
Use the type HandleUpdateCollision instead, and use the function copyField instead of the data constructor." #-} + -- | Copy the field directly from the record. + CopyField :: EntityField record typ -> HandleUpdateCollision record + -- | Only copy the field if it is not equal to the provided value. + CopyUnlessEq :: PersistField typ => EntityField record typ -> typ -> HandleUpdateCollision record -- | Copy the field into the database only if the value in the -- corresponding record is non-@NULL@. @@ -1487,8 +1496,8 @@ mkBulkInsertQuery records fieldValues updates = firstField = case entityFieldNames of [] -> error "The entity you're trying to insert does not have any fields." (field:_) -> field - entityFieldNames = map fieldDbToText (entityFields entityDef') - tableName = T.pack . escapeE . entityDB $ entityDef' + entityFieldNames = map fieldDbToText (getEntityFieldsDatabase entityDef') + tableName = T.pack . escapeE . getEntityDBName $ entityDef' copyUnlessValues = map snd fieldsToMaybeCopy recordValues = concatMap (map toPersistValue . toPersistFields) records recordPlaceholders = Util.commaSeparated $ map (Util.parenWrapped . Util.commaSeparated . map (const "?") . toPersistFields) records @@ -1524,12 +1533,12 @@ mkBulkInsertQuery records fieldValues updates = putManySql :: EntityDef -> Int -> Text putManySql ent n = putManySql' fields ent n where - fields = entityFields ent + fields = getEntityFieldsDatabase ent repsertManySql :: EntityDef -> Int -> Text repsertManySql ent n = putManySql' fields ent n where - fields = keyAndEntityFields ent + fields = NEL.toList $ keyAndEntityFields ent putManySql' :: [FieldDef] -> EntityDef -> Int -> Text putManySql' (filter isFieldNotGenerated -> fields) ent n = q @@ -1537,7 +1546,7 @@ putManySql' (filter isFieldNotGenerated -> fields) ent n = q fieldDbToText = (T.pack . escapeF) . fieldDB mkAssignment f = T.concat [f, "=VALUES(", f, ")"] - table = (T.pack . escapeE) . entityDB $ ent + table = (T.pack . escapeE) . 
getEntityDBName $ ent columns = Util.commaSeparated $ map fieldDbToText fields placeholders = map (const "?") fields updates = map (mkAssignment . fieldDbToText) fields diff --git a/persistent-mysql/persistent-mysql.cabal b/persistent-mysql/persistent-mysql.cabal index aa9bb39ab..ff5e4441f 100644 --- a/persistent-mysql/persistent-mysql.cabal +++ b/persistent-mysql/persistent-mysql.cabal @@ -1,5 +1,5 @@ name: persistent-mysql -version: 2.12.1.0 +version: 2.13.0.0 license: MIT license-file: LICENSE author: Felipe Lessa , Michael Snoyman @@ -28,7 +28,7 @@ extra-source-files: ChangeLog.md library build-depends: base >= 4.9 && < 5 - , persistent >= 2.12 && < 3 + , persistent >= 2.13 && < 3 , aeson >= 1.0 , blaze-builder , bytestring >= 0.10.8 @@ -54,28 +54,34 @@ test-suite test type: exitcode-stdio-1.0 main-is: main.hs hs-source-dirs: test - other-modules: MyInit - InsertDuplicateUpdate - CustomConstraintTest + other-modules: + MyInit + InsertDuplicateUpdate + CustomConstraintTest + ImplicitUuidSpec ghc-options: -Wall - build-depends: base >= 4.9 && < 5 - , persistent - , persistent-mysql - , persistent-qq - , persistent-test - , bytestring - , containers - , fast-logger - , hspec >= 2.4 - , HUnit - , monad-logger - , mysql - , QuickCheck - , quickcheck-instances - , resourcet - , text - , time - , transformers - , unliftio-core + build-depends: + base >= 4.9 && < 5 + , aeson + , bytestring + , containers + , fast-logger + , hspec >= 2.4 + , http-api-data + , HUnit + , monad-logger + , mysql + , path-pieces + , persistent + , persistent-mysql + , persistent-qq + , persistent-test + , QuickCheck + , quickcheck-instances + , resourcet + , text + , time + , transformers + , unliftio-core default-language: Haskell2010 diff --git a/persistent-mysql/test/ImplicitUuidSpec.hs b/persistent-mysql/test/ImplicitUuidSpec.hs new file mode 100644 index 000000000..501b5e7da --- /dev/null +++ b/persistent-mysql/test/ImplicitUuidSpec.hs @@ -0,0 +1,83 @@ +{-# LANGUAGE DataKinds #-} +{-# 
LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module ImplicitUuidSpec where + +import MyInit + +import Data.Proxy +import Database.Persist.MySQL + +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) + +share + [ mkPersist (sqlSettingsUuid "UUID()") + , mkEntityDefList "entities" + ] + [persistLowerCase| + +WithDefUuid + name Text + + deriving Eq Show Ord + +|] + +implicitUuidMigrate :: Migration +implicitUuidMigrate = do + migrateModels entities + +wipe :: IO () +wipe = db $ do + rawExecute "DROP TABLE IF EXISTS with_def_uuid;" [] + void $ runMigrationSilent implicitUuidMigrate + +itDb :: String -> SqlPersistT (LoggingT (ResourceT IO)) a -> SpecWith (Arg (IO ())) +itDb msg action = it msg $ db $ void action + +pass :: IO () +pass = pure () + +spec :: Spec +spec = describe "ImplicitUuidSpec" $ before_ wipe $ do + describe "WithDefUuidKey" $ do + it "works on UUIDs" $ do + let withDefUuidKey = WithDefUuidKey (UUID "Hello") + pass + describe "getEntityId" $ do + let Just idField = getEntityIdField (entityDef (Proxy @WithDefUuid)) + it "has a SqlString SqlType" $ asIO $ do + fieldSqlType idField `shouldBe` SqlString + it "is an implicit ID column" $ asIO $ do + fieldIsImplicitIdColumn idField `shouldBe` True + + describe "insert" $ do + itDb "successfully has a default" $ do + let matt = WithDefUuid + { withDefUuidName = + "Matt" + } + k <- insert matt + mrec <- get k + uuids <- selectList @WithDefUuid [] [] + liftIO $ do + -- MySQL's insert functionality is currently broken. 
The @k@ + -- here is derived from @SELECT LAST_INSERT_ID()@ which only + -- works on auto incrementing IDs. + -- + -- See #1251 for more details. + mrec `shouldBe` Nothing + + map entityVal uuids `shouldSatisfy` (matt `elem`) diff --git a/persistent-mysql/test/MyInit.hs b/persistent-mysql/test/MyInit.hs index deb7ffdbf..ddd50c83f 100644 --- a/persistent-mysql/test/MyInit.hs +++ b/persistent-mysql/test/MyInit.hs @@ -1,4 +1,7 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE TypeApplications #-} module MyInit ( (@/=), (@==), (==@) @@ -26,12 +29,14 @@ module MyInit ( , MonadUnliftIO , liftIO , mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase + , mkEntityDefList, sqlSettingsUuid , Int32, Int64 , Text , module Control.Monad.Trans.Reader , module Control.Monad , module Database.Persist.Sql , BS.ByteString + , migrateModels , SomeException , MonadFail , TestFn(..) @@ -40,44 +45,71 @@ module MyInit ( , truncateUTCTime , arbText , liftA2 + , LoggingT, ResourceT, UUID(..) ) where import Init - ( TestFn(..), truncateTimeOfDay, truncateUTCTime - , truncateToMicro, arbText, GenerateKey(..) - , (@/=), (@==), (==@) - , assertNotEqual, assertNotEmpty, assertEmpty, asIO - , isTravis, RunDb, MonadFail - ) + ( GenerateKey(..) + , MonadFail + , RunDb + , TestFn(..) 
+ , arbText + , asIO + , assertEmpty + , assertNotEmpty + , assertNotEqual + , isTravis + , truncateTimeOfDay + , truncateToMicro + , truncateUTCTime + , (==@) + , (@/=) + , (@==) + ) -- re-exports import Control.Applicative (liftA2) import Control.Exception (SomeException) -import Control.Monad (void, replicateM, liftM, when, forM_) +import Control.Monad (forM_, liftM, replicateM, void, when) import Control.Monad.Trans.Reader -import Database.Persist.TH (mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase, MkPersistSettings(..)) +import Data.Aeson (FromJSON, ToJSON, Value(..)) import Database.Persist.Sql.Raw.QQ +import Database.Persist.TH + ( MkPersistSettings(..) + , migrateModels + , setImplicitIdDef + , mkEntityDefList + , mkMigrate + , mkPersist + , persistLowerCase + , persistUpperCase + , share + , sqlSettings + ) import Test.Hspec import Test.QuickCheck.Instances () +import Web.Internal.HttpApiData +import Web.PathPieces +import Database.Persist.ImplicitIdDef -- testing -import Test.HUnit ((@?=),(@=?), Assertion, assertFailure, assertBool) +import Test.HUnit (Assertion, assertBool, assertFailure, (@=?), (@?=)) import Control.Monad (unless, (>=>)) -import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.IO.Class +import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Logger import Control.Monad.Trans.Resource (ResourceT, runResourceT) import qualified Data.ByteString as BS import Data.Int (Int32, Int64) import Data.Text (Text) +import qualified Data.Text.Encoding as TE import qualified Database.MySQL.Base as MySQL import System.Log.FastLogger (fromLogStr) import Database.Persist import Database.Persist.MySQL import Database.Persist.Sql -import Database.Persist.TH () _debugOn :: Bool _debugOn = False @@ -122,3 +154,22 @@ runConn f = do db :: SqlPersistT (LoggingT (ResourceT IO)) () -> Assertion db actions = do runResourceT $ runConn $ actions >> transactionUndo + +newtype UUID = UUID { unUUID :: Text } + 
deriving stock + (Show, Eq, Ord, Read) + deriving newtype + ( ToJSON, FromJSON + , PersistField, PersistFieldSql + , FromHttpApiData, ToHttpApiData, PathPiece + ) + +sqlSettingsUuid :: Text -> MkPersistSettings +sqlSettingsUuid defExpr = + let + uuidDef = + setImplicitIdDefMaxLen 100 $ mkImplicitIdDef @UUID defExpr + settings = + setImplicitIdDef uuidDef sqlSettings + in + settings diff --git a/persistent-mysql/test/main.hs b/persistent-mysql/test/main.hs index 56e165d8f..a0551dafb 100644 --- a/persistent-mysql/test/main.hs +++ b/persistent-mysql/test/main.hs @@ -1,24 +1,28 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} -{-# LANGUAGE DataKinds, FlexibleInstances #-} -{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE GeneralizedNewtypeDeriving #-} + {-# OPTIONS_GHC -Wno-unused-top-binds #-} import MyInit -import Data.Time (Day, UTCTime (..), TimeOfDay, timeToTimeOfDay, timeOfDayToTime) -import Data.Time.Clock.POSIX (utcTimeToPOSIXSeconds, posixSecondsToUTCTime) +import qualified Data.ByteString as BS import Data.Fixed -import Test.QuickCheck -import qualified Data.Text as T import Data.IntMap (IntMap) -import qualified Data.ByteString as BS +import qualified Data.Text as T +import Data.Time (Day, TimeOfDay, UTCTime(..), timeOfDayToTime, timeToTimeOfDay) +import Data.Time.Clock.POSIX (posixSecondsToUTCTime, utcTimeToPOSIXSeconds) +import Database.Persist.Sql +import Test.QuickCheck import qualified CompositeTest import qualified CustomPersistFieldTest @@ -35,25 +39,26 @@ import qualified MaxLenTest import qualified 
MigrationColumnLengthTest import qualified MigrationIdempotencyTest import qualified MigrationOnlyTest -import qualified MpsNoPrefixTest import qualified MpsCustomPrefixTest -import qualified PersistentTest +import qualified MpsNoPrefixTest import qualified PersistUniqueTest +import qualified PersistentTest -- FIXME: Not used... should it be? -- import qualified PrimaryTest import qualified RawSqlTest import qualified ReadWriteTest import qualified Recursive -- TODO: can't use this as MySQL can't do DEFAULT CURRENT_DATE +import qualified CustomConstraintTest +import qualified ForeignKey +import qualified GeneratedColumnTestSQL +import qualified ImplicitUuidSpec +import qualified LongIdentifierTest import qualified RenameTest import qualified SumTypeTest import qualified TransactionLevelTest import qualified UniqueTest import qualified UpsertTest -import qualified CustomConstraintTest -import qualified LongIdentifierTest -import qualified GeneratedColumnTestSQL -import qualified ForeignKey type Tuple a b = (a, b) @@ -109,98 +114,101 @@ setup migration = do main :: IO () main = do - runConn $ do - mapM_ setup - [ PersistentTest.testMigrate - , PersistentTest.noPrefixMigrate - , PersistentTest.customPrefixMigrate - , EmbedTest.embedMigrate - , EmbedOrderTest.embedOrderMigrate - , LargeNumberTest.numberMigrate - , UniqueTest.uniqueMigrate - , MaxLenTest.maxlenMigrate - , Recursive.recursiveMigrate - , CompositeTest.compositeMigrate - , PersistUniqueTest.migration - , RenameTest.migration - , CustomPersistFieldTest.customFieldMigrate - , InsertDuplicateUpdate.duplicateMigrate - , MigrationIdempotencyTest.migration - , CustomPrimaryKeyReferenceTest.migration - , MigrationColumnLengthTest.migration - , TransactionLevelTest.migration - -- , LongIdentifierTest.migration - , ForeignKey.compositeMigrate - ] - PersistentTest.cleanDB - ForeignKey.cleanDB + runConn $ do + mapM_ setup + [ PersistentTest.testMigrate + , PersistentTest.noPrefixMigrate + , 
PersistentTest.customPrefixMigrate + , EmbedTest.embedMigrate + , EmbedOrderTest.embedOrderMigrate + , LargeNumberTest.numberMigrate + , UniqueTest.uniqueMigrate + , MaxLenTest.maxlenMigrate + , Recursive.recursiveMigrate + , CompositeTest.compositeMigrate + , PersistUniqueTest.migration + , RenameTest.migration + , CustomPersistFieldTest.customFieldMigrate + , InsertDuplicateUpdate.duplicateMigrate + , MigrationIdempotencyTest.migration + , CustomPrimaryKeyReferenceTest.migration + , MigrationColumnLengthTest.migration + , TransactionLevelTest.migration + -- , LongIdentifierTest.migration + , ForeignKey.compositeMigrate + ] + PersistentTest.cleanDB + ForeignKey.cleanDB - hspec $ do - xdescribe "This is pending on MySQL because you can't have DEFAULT CURRENT_DATE" $ do - RenameTest.specsWith db - DataTypeTest.specsWith - db - (Just (runMigrationSilent dataTypeMigrate)) - [ TestFn "text" dataTypeTableText - , TestFn "textMaxLen" dataTypeTableTextMaxLen - , TestFn "bytes" dataTypeTableBytes - , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple - , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen - , TestFn "int" dataTypeTableInt - , TestFn "intList" dataTypeTableIntList - , TestFn "intMap" dataTypeTableIntMap - , TestFn "bool" dataTypeTableBool - , TestFn "day" dataTypeTableDay - , TestFn "time" (roundTime . dataTypeTableTime) - , TestFn "utc" (roundUTCTime . 
dataTypeTableUtc) - , TestFn "timeFrac" (dataTypeTableTimeFrac) - , TestFn "utcFrac" (dataTypeTableUtcFrac) - ] - [ ("pico", dataTypeTablePico) ] - dataTypeTableDouble - HtmlTest.specsWith - db - (Just (runMigrationSilent HtmlTest.htmlMigrate)) - EmbedTest.specsWith db - EmbedOrderTest.specsWith db - LargeNumberTest.specsWith db - UniqueTest.specsWith db - MaxLenTest.specsWith db - Recursive.specsWith db - SumTypeTest.specsWith db (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) - MigrationOnlyTest.specsWith db - (Just - $ runMigrationSilent MigrationOnlyTest.migrateAll1 - >> runMigrationSilent MigrationOnlyTest.migrateAll2 - ) - PersistentTest.specsWith db - PersistentTest.filterOrSpecs db - ReadWriteTest.specsWith db - RawSqlTest.specsWith db - UpsertTest.specsWith - db - UpsertTest.Don'tUpdateNull - UpsertTest.UpsertPreserveOldKey + hspec $ do + ImplicitUuidSpec.spec + xdescribe "This is pending on MySQL because you can't have DEFAULT CURRENT_DATE" $ do + RenameTest.specsWith db + DataTypeTest.specsWith + db + (Just (runMigrationSilent dataTypeMigrate)) + [ TestFn "text" dataTypeTableText + , TestFn "textMaxLen" dataTypeTableTextMaxLen + , TestFn "bytes" dataTypeTableBytes + , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple + , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen + , TestFn "int" dataTypeTableInt + , TestFn "intList" dataTypeTableIntList + , TestFn "intMap" dataTypeTableIntMap + , TestFn "bool" dataTypeTableBool + , TestFn "day" dataTypeTableDay + , TestFn "time" (roundTime . dataTypeTableTime) + , TestFn "utc" (roundUTCTime . 
dataTypeTableUtc) + , TestFn "timeFrac" (dataTypeTableTimeFrac) + , TestFn "utcFrac" (dataTypeTableUtcFrac) + ] + [ ("pico", dataTypeTablePico) ] + dataTypeTableDouble + HtmlTest.specsWith + db + (Just (runMigrationSilent HtmlTest.htmlMigrate)) + EmbedTest.specsWith db + EmbedOrderTest.specsWith db + LargeNumberTest.specsWith db + UniqueTest.specsWith db + MaxLenTest.specsWith db + Recursive.specsWith db + SumTypeTest.specsWith db (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) + MigrationOnlyTest.specsWith db + (Just $ do + void $ rawExecute "DROP TABLE IF EXISTS referencing;" [] + void $ rawExecute "DROP TABLE IF EXISTS two_field;" [] + void $ runMigrationSilent MigrationOnlyTest.migrateAll1 + void $ runMigrationSilent MigrationOnlyTest.migrateAll2 + ) + PersistentTest.specsWith db + PersistentTest.filterOrSpecs db + ReadWriteTest.specsWith db + RawSqlTest.specsWith db + UpsertTest.specsWith + db + UpsertTest.Don'tUpdateNull + UpsertTest.UpsertPreserveOldKey - ForeignKey.specsWith db - MpsNoPrefixTest.specsWith db - MpsCustomPrefixTest.specsWith db - EmptyEntityTest.specsWith db (Just (runMigrationSilent EmptyEntityTest.migration)) - CompositeTest.specsWith db - PersistUniqueTest.specsWith db - CustomPersistFieldTest.specsWith db - CustomPrimaryKeyReferenceTest.specsWith db - InsertDuplicateUpdate.specs - MigrationColumnLengthTest.specsWith db - EquivalentTypeTest.specsWith db - TransactionLevelTest.specsWith db + ForeignKey.specsWith db + MpsNoPrefixTest.specsWith db + MpsCustomPrefixTest.specsWith db + EmptyEntityTest.specsWith db (Just (runMigrationSilent EmptyEntityTest.migration)) + CompositeTest.specsWith db + PersistUniqueTest.specsWith db + CustomPersistFieldTest.specsWith db + CustomPrimaryKeyReferenceTest.specsWith db + InsertDuplicateUpdate.specs + MigrationColumnLengthTest.specsWith db + EquivalentTypeTest.specsWith db + TransactionLevelTest.specsWith db - MigrationIdempotencyTest.specsWith db - CustomConstraintTest.specs db - -- TODO: 
implement automatic truncation for too long foreign keys, so we can run this test. - xdescribe "The migration for this test currently fails because of MySQL's 64 character limit for identifiers. See https://github.com/yesodweb/persistent/issues/1000 for details" $ - LongIdentifierTest.specsWith db - GeneratedColumnTestSQL.specsWith db + MigrationIdempotencyTest.specsWith db + CustomConstraintTest.specs db + -- TODO: implement automatic truncation for too long foreign keys, so we can run this test. + xdescribe "The migration for this test currently fails because of MySQL's 64 character limit for identifiers. See https://github.com/yesodweb/persistent/issues/1000 for details" $ + LongIdentifierTest.specsWith db + GeneratedColumnTestSQL.specsWith db roundFn :: RealFrac a => a -> Integer roundFn = round diff --git a/persistent-postgresql/ChangeLog.md b/persistent-postgresql/ChangeLog.md index 580e69507..cc07605db 100644 --- a/persistent-postgresql/ChangeLog.md +++ b/persistent-postgresql/ChangeLog.md @@ -1,5 +1,11 @@ # Changelog for persistent-postgresql + +## 2.13.0.0 (unreleased) + +* [#1225](https://github.com/yesodweb/persistent/pull/1225) + * Support `persistent-2.13.0.0` making `SqlBackend` internal + # 2.12.1.1 * [#1235](https://github.com/yesodweb/persistent/pull/1235) diff --git a/persistent-postgresql/Database/Persist/Postgresql.hs b/persistent-postgresql/Database/Persist/Postgresql.hs index 11f565d0b..5b7a358ec 100644 --- a/persistent-postgresql/Database/Persist/Postgresql.hs +++ b/persistent-postgresql/Database/Persist/Postgresql.hs @@ -55,6 +55,8 @@ import qualified Database.PostgreSQL.Simple.Transaction as PG import qualified Database.PostgreSQL.Simple.TypeInfo.Static as PS import qualified Database.PostgreSQL.Simple.Types as PG +import Data.Proxy (Proxy(..)) +import qualified Data.List.NonEmpty as NEL import Control.Arrow import Control.Exception (Exception, throw, throwIO) import Control.Monad @@ -103,6 +105,7 @@ import Data.Time (NominalDiffTime, 
localTimeToUTC, utc) import System.Environment (getEnvironment) import Database.Persist.Sql +import Database.Persist.SqlBackend import qualified Database.Persist.Sql.Util as Util -- | A @libpq@ connection string. A simple example of connection @@ -348,14 +351,15 @@ openSimpleConnWithVersion getVerDouble logFunc conn = do -- and connection. createBackend :: LogFunc -> NonEmpty Word -> IORef (Map.Map Text Statement) -> PG.Connection -> SqlBackend -createBackend logFunc serverVersion smap conn = do - SqlBackend +createBackend logFunc serverVersion smap conn = + maybe id setConnPutManySql (upsertFunction putManySql serverVersion) $ + maybe id setConnUpsertSql (upsertFunction upsertSql' serverVersion) $ + setConnInsertManySql insertManySql' $ + maybe id setConnRepsertManySql (upsertFunction repsertManySql serverVersion) $ + mkSqlBackend MkSqlBackendArgs { connPrepare = prepare' conn , connStmtMap = smap , connInsertSql = insertSql' - , connInsertManySql = Just insertManySql' - , connUpsertSql = upsertFunction upsertSql' serverVersion - , connPutManySql = upsertFunction putManySql serverVersion , connClose = PG.close conn , connMigrateSql = migrate' , connBegin = \_ mIsolation -> case mIsolation of @@ -368,14 +372,12 @@ createBackend logFunc serverVersion smap conn = do , connCommit = const $ PG.commit conn , connRollback = const $ PG.rollback conn , connEscapeFieldName = escapeF - , connEscapeTableName = escapeE . entityDB + , connEscapeTableName = escapeE . 
getEntityDBName , connEscapeRawName = escape , connNoLimit = "LIMIT ALL" , connRDBMS = "postgresql" , connLimitOffset = decorateSQLWithLimitOffset "LIMIT ALL" , connLogFunc = logFunc - , connMaxParams = Nothing - , connRepsertManySql = upsertFunction repsertManySql serverVersion } prepare' :: PG.Connection -> Text -> IO Statement @@ -390,15 +392,17 @@ prepare' conn sql = do insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult insertSql' ent vals = - case entityPrimary ent of - Just _pdef -> ISRManyKeys sql vals - Nothing -> ISRSingle (sql <> " RETURNING " <> escapeF (fieldDB (entityId ent))) + case getEntityId ent of + EntityIdNaturalKey _pdef -> + ISRManyKeys sql vals + EntityIdField field -> + ISRSingle (sql <> " RETURNING " <> escapeF (fieldDB field)) where (fieldNames, placeholders) = unzip (Util.mkInsertPlaceholders ent escapeF) sql = T.concat [ "INSERT INTO " - , escapeE $ entityDB ent - , if null (entityFields ent) + , escapeE $ getEntityDBName ent + , if null (getEntityFieldsDatabase ent) then " DEFAULT VALUES" else T.concat [ "(" @@ -413,7 +417,7 @@ upsertSql' :: EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text upsertSql' ent uniqs updateVal = T.concat [ "INSERT INTO " - , escapeE (entityDB ent) + , escapeE (getEntityDBName ent) , "(" , T.intercalate "," fieldNames , ") VALUES (" @@ -432,7 +436,7 @@ upsertSql' ent uniqs updateVal = wher = T.intercalate " AND " $ map (singleClause . snd) $ NEL.toList uniqs singleClause :: FieldNameDB -> Text - singleClause field = escapeE (entityDB ent) <> "." <> (escapeF field) <> " =?" + singleClause field = escapeE (getEntityDBName ent) <> "." <> (escapeF field) <> " =?" -- | SQL for inserting multiple rows at once and returning their primary keys. 
insertManySql' :: EntityDef -> [[PersistValue]] -> InsertSqlResult @@ -442,13 +446,13 @@ insertManySql' ent valss = (fieldNames, placeholders)= unzip (Util.mkInsertPlaceholders ent escapeF) sql = T.concat [ "INSERT INTO " - , escapeE (entityDB ent) + , escapeE (getEntityDBName ent) , "(" , T.intercalate "," fieldNames , ") VALUES (" , T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," placeholders , ") RETURNING " - , Util.commaSeparated $ Util.dbIdColumnsEsc escapeF ent + , Util.commaSeparated $ NEL.toList $ Util.dbIdColumnsEsc escapeF ent ] @@ -789,7 +793,7 @@ migrate' allDefs getter entity = fmap (fmap $ map showAlterDb) $ do return $ Right $ migrationText exists' old'' (errs, _) -> return $ Left errs where - name = entityDB entity + name = getEntityDBName entity (newcols', udefs, fdefs) = postgresMkColumns allDefs entity migrationText exists' old'' | not exists' = @@ -827,7 +831,7 @@ mkForeignAlt -> Maybe AlterDB mkForeignAlt entity fdef = pure $ AlterColumn tableName_ addReference where - tableName_ = entityDB entity + tableName_ = getEntityDBName entity addReference = AddReference (foreignRefTableDBName fdef) @@ -860,23 +864,23 @@ addTable cols entity = Just _ -> cols _ -> - filter (\c -> cName c /= fieldDB (entityId entity) ) cols + filter (\c -> Just (cName c) /= fmap fieldDB (getEntityIdField entity) ) cols name = - entityDB entity + getEntityDBName entity idtxt = - case entityPrimary entity of - Just pdef -> + case getEntityId entity of + EntityIdNaturalKey pdef -> T.concat [ " PRIMARY KEY (" - , T.intercalate "," $ map (escapeF . fieldDB) $ compositeFields pdef + , T.intercalate "," $ map (escapeF . 
fieldDB) $ NEL.toList $ compositeFields pdef , ")" ] - Nothing -> - let defText = defaultAttribute $ fieldAttrs $ entityId entity - sType = fieldSqlType $ entityId entity + EntityIdField field -> + let defText = defaultAttribute $ fieldAttrs field + sType = fieldSqlType field in T.concat - [ escapeF $ fieldDB (entityId entity) + [ escapeF $ fieldDB field , maySerial sType defText , " PRIMARY KEY UNIQUE" , mayDefault defText @@ -947,7 +951,7 @@ getColumns getter def cols = do stmt <- getter sqlv let vals = - [ PersistText $ unEntityNameDB $ entityDB def + [ PersistText $ unEntityNameDB $ getEntityDBName def ] columns <- with (stmtQuery stmt vals) (\src -> runConduit $ src .| processColumns .| CL.consume) let sqlc = T.concat @@ -994,7 +998,7 @@ getColumns getter def cols = do $ groupBy ((==) `on` fst) rows processColumns = CL.mapM $ \x'@((PersistText cname) : _) -> do - col <- liftIO $ getColumn getter (entityDB def) x' (Map.lookup cname refMap) + col <- liftIO $ getColumn getter (getEntityDBName def) x' (Map.lookup cname refMap) pure $ case col of Left e -> Left e Right c -> Right $ Left c @@ -1005,7 +1009,7 @@ safeToRemove :: EntityDef -> FieldNameDB -> Bool safeToRemove def (FieldNameDB colName) = any (elem FieldAttrSafeToRemove . fieldAttrs) $ filter ((== FieldNameDB colName) . fieldDB) - $ keyAndEntityFields def + $ NEL.toList $ keyAndEntityFields def getAlters :: [EntityDef] -> EntityDef @@ -1248,15 +1252,15 @@ findAlters defs edef col@(Column name isNull sqltype def _gen _defConstraintName refAdd Nothing = [] refAdd (Just colRef) = - case find ((== crTableName colRef) . entityDB) defs of + case find ((== crTableName colRef) . 
getEntityDBName) defs of Just refdef - | _oldName /= fieldDB (entityId edef) + | Just _oldName /= fmap fieldDB (getEntityIdField edef) -> [AddReference - (entityDB edef) + (getEntityDBName edef) (crConstraintName colRef) [name] - (Util.dbIdColumnsEsc escapeF refdef) + (NEL.toList $ Util.dbIdColumnsEsc escapeF refdef) (crFieldCascade colRef) ] Just _ -> [] @@ -1269,7 +1273,7 @@ findAlters defs edef col@(Column name isNull sqltype def _gen _defConstraintName else refDrop ref' ++ refAdd ref modNull = case (isNull, isNull') of (True, False) -> do - guard $ name /= fieldDB (entityId edef) + guard $ Just name /= fmap fieldDB (getEntityIdField edef) pure (IsNull col) (False, True) -> let up = case def of @@ -1328,19 +1332,19 @@ getAddReference -> ColumnReference -> Maybe AlterDB getAddReference allDefs entity cname cr@ColumnReference {crTableName = s, crConstraintName=constraintName} = do - guard $ cname /= fieldDB (entityId entity) + guard $ Just cname /= fmap fieldDB (getEntityIdField entity) pure $ AlterColumn table (AddReference s constraintName [cname] id_ (crFieldCascade cr) ) where - table = entityDB entity + table = getEntityDBName entity id_ = fromMaybe (error $ "Could not find ID of entity " ++ show s) $ do - entDef <- find ((== s) . entityDB) allDefs - return $ Util.dbIdColumnsEsc escapeF entDef + entDef <- find ((== s) . 
getEntityDBName) allDefs + return $ NEL.toList $ Util.dbIdColumnsEsc escapeF entDef showColumn :: Column -> Text showColumn (Column n nu sqlType' def gen _defConstraintName _maxLen _ref) = T.concat @@ -1661,7 +1665,7 @@ maximumIdentifierLength :: Int maximumIdentifierLength = 63 udToPair :: UniqueDef -> (ConstraintNameDB, [FieldNameDB]) -udToPair ud = (uniqueDBName ud, map snd $ uniqueFields ud) +udToPair ud = (uniqueDBName ud, map snd $ NEL.toList $ uniqueFields ud) mockMigrate :: [EntityDef] -> (Text -> IO Statement) @@ -1672,7 +1676,7 @@ mockMigrate allDefs _ entity = fmap (fmap $ map showAlterDb) $ do ([], old'') -> return $ Right $ migrationText False old'' (errs, _) -> return $ Left errs where - name = entityDB entity + name = getEntityDBName entity migrationText exists' old'' = if not exists' then createText newcols fdefs udspair @@ -1706,49 +1710,46 @@ mockMigrate allDefs _ entity = fmap (fmap $ map showAlterDb) $ do -- with the difference that an actual database is not needed. mockMigration :: Migration -> IO () mockMigration mig = do - smap <- newIORef $ Map.empty - let sqlbackend = SqlBackend { connPrepare = \_ -> do - return Statement - { stmtFinalize = return () - , stmtReset = return () - , stmtExecute = undefined - , stmtQuery = \_ -> return $ return () - }, - connInsertManySql = Nothing, - connInsertSql = undefined, - connUpsertSql = Nothing, - connPutManySql = Nothing, - connStmtMap = smap, - connClose = undefined, - connMigrateSql = mockMigrate, - connBegin = undefined, - connCommit = undefined, - connRollback = undefined, - connEscapeFieldName = escapeF, - connEscapeTableName = escapeE . 
entityDB, - connEscapeRawName = escape, - connNoLimit = undefined, - connRDBMS = undefined, - connLimitOffset = undefined, - connLogFunc = undefined, - connMaxParams = Nothing, - connRepsertManySql = Nothing - } - result = runReaderT $ runWriterT $ runWriterT mig - resp <- result sqlbackend - mapM_ T.putStrLn $ map snd $ snd resp + smap <- newIORef $ Map.empty + let sqlbackend = + mkSqlBackend MkSqlBackendArgs + { connPrepare = \_ -> do + return Statement + { stmtFinalize = return () + , stmtReset = return () + , stmtExecute = undefined + , stmtQuery = \_ -> return $ return () + } + , connInsertSql = undefined + , connStmtMap = smap + , connClose = undefined + , connMigrateSql = mockMigrate + , connBegin = undefined + , connCommit = undefined + , connRollback = undefined + , connEscapeFieldName = escapeF + , connEscapeTableName = escapeE . getEntityDBName + , connEscapeRawName = escape + , connNoLimit = undefined + , connRDBMS = undefined + , connLimitOffset = undefined + , connLogFunc = undefined + } + result = runReaderT $ runWriterT $ runWriterT mig + resp <- result sqlbackend + mapM_ T.putStrLn $ map snd $ snd resp putManySql :: EntityDef -> Int -> Text putManySql ent n = putManySql' conflictColumns fields ent n where - fields = entityFields ent - conflictColumns = concatMap (map (escapeF . snd) . uniqueFields) (entityUniques ent) + fields = getEntityFieldsDatabase ent + conflictColumns = concatMap (map (escapeF . snd) . NEL.toList . uniqueFields) (getEntityUniques ent) repsertManySql :: EntityDef -> Int -> Text repsertManySql ent n = putManySql' conflictColumns fields ent n where - fields = keyAndEntityFields ent - conflictColumns = escapeF . fieldDB <$> entityKeyFields ent + fields = NEL.toList $ keyAndEntityFields ent + conflictColumns = NEL.toList $ escapeF . fieldDB <$> getEntityKeyFields ent -- | This type is used to determine how to update rows using Postgres' -- @INSERT ... 
ON CONFLICT KEY UPDATE@ functionality, exposed via @@ -1860,11 +1861,7 @@ upsertManyWhere upsertManyWhere [] _ _ _ = return () upsertManyWhere records fieldValues updates filters = do conn <- asks projectBackend - let uniqDef = -- onlyOneUniqueDef (Nothing :: Maybe record) - case entityUniques (entityDef (Nothing :: Maybe record)) of - [uniq] -> uniq - _ -> error "impossible due to OnlyOneUniqueKey constraint" - -- TODO: use onlyOneUniqueDef when it's exported + let uniqDef = onlyOneUniqueDef (Proxy :: Proxy record) uncurry rawExecute $ mkBulkUpsertQuery records conn fieldValues updates filters uniqDef @@ -1927,12 +1924,12 @@ mkBulkUpsertQuery records conn fieldValues updates filters uniqDef = fieldDbToText = escapeF . fieldDB entityDef' = entityDef records conflictColumns = - map (escapeF . snd) $ uniqueFields uniqDef + map (escapeF . snd) $ NEL.toList $ uniqueFields uniqDef firstField = case entityFieldNames of [] -> error "The entity you're trying to insert does not have any fields." (field:_) -> field - entityFieldNames = map fieldDbToText (entityFields entityDef') - nameOfTable = escapeE . entityDB $ entityDef' + entityFieldNames = map fieldDbToText (getEntityFieldsDatabase entityDef') + nameOfTable = escapeE . getEntityDBName $ entityDef' copyUnlessValues = map snd fieldsToMaybeCopy recordValues = concatMap (map toPersistValue . toPersistFields) records recordPlaceholders = @@ -1994,7 +1991,7 @@ putManySql' conflictColumns (filter isFieldNotGenerated -> fields) ent n = q fieldDbToText = escapeF . fieldDB mkAssignment f = T.concat [f, "=EXCLUDED.", f] - table = escapeE . entityDB $ ent + table = escapeE . getEntityDBName $ ent columns = Util.commaSeparated $ map fieldDbToText fields placeholders = map (const "?") fields updates = map (mkAssignment . 
fieldDbToText) fields @@ -2025,7 +2022,5 @@ migrateEnableExtension extName = WriterT $ WriterT $ do postgresMkColumns :: [EntityDef] -> EntityDef -> ([Column], [UniqueDef], [ForeignDef]) postgresMkColumns allDefs t = - mkColumns allDefs t (emptyBackendSpecificOverrides - { backendSpecificForeignKeyName = Just refName - } - ) + mkColumns allDefs t + $ setBackendSpecificForeignKeyName refName emptyBackendSpecificOverrides diff --git a/persistent-postgresql/persistent-postgresql.cabal b/persistent-postgresql/persistent-postgresql.cabal index 279d4af0e..96176a24b 100644 --- a/persistent-postgresql/persistent-postgresql.cabal +++ b/persistent-postgresql/persistent-postgresql.cabal @@ -1,5 +1,5 @@ name: persistent-postgresql -version: 2.12.1.1 +version: 2.13.0.0 license: MIT license-file: LICENSE author: Felipe Lessa, Michael Snoyman @@ -16,7 +16,7 @@ extra-source-files: ChangeLog.md library build-depends: base >= 4.9 && < 5 - , persistent >= 2.12.1.0 && < 2.13 + , persistent >= 2.13 && < 3 , aeson >= 1.0 , attoparsec , blaze-builder @@ -54,6 +54,7 @@ test-suite test CustomConstraintTest PgIntervalTest UpsertWhere + ImplicitUuidSpec ghc-options: -Wall build-depends: base >= 4.9 && < 5 @@ -76,6 +77,8 @@ test-suite test , text , time , transformers + , path-pieces + , http-api-data , unliftio-core , unliftio , unordered-containers diff --git a/persistent-postgresql/test/ArrayAggTest.hs b/persistent-postgresql/test/ArrayAggTest.hs index 1f8167165..b8902a114 100644 --- a/persistent-postgresql/test/ArrayAggTest.hs +++ b/persistent-postgresql/test/ArrayAggTest.hs @@ -43,7 +43,7 @@ specs = do , UserPT "c" $ Just "d" , UserPT "e" Nothing , UserPT "g" $ Just "h" ] - escape <- asks connEscapeRawName + escape <- getEscapeRawNameFunction let query = T.concat [ "SELECT array_agg(", escape dbField, ") " , "FROM ", escape "UserPT" ] diff --git a/persistent-postgresql/test/ImplicitUuidSpec.hs b/persistent-postgresql/test/ImplicitUuidSpec.hs new file mode 100644 index 
000000000..68f5fd587 --- /dev/null +++ b/persistent-postgresql/test/ImplicitUuidSpec.hs @@ -0,0 +1,75 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module ImplicitUuidSpec where + +import PgInit + +import Data.Proxy +import Database.Persist.Postgresql + +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) + +share + [ mkPersist (sqlSettingsUuid "uuid_generate_v1mc()") + , mkEntityDefList "entities" + ] + [persistLowerCase| + +WithDefUuid + name Text sqltype=varchar(80) + + deriving Eq Show Ord + +|] + +implicitUuidMigrate :: Migration +implicitUuidMigrate = do + runSqlCommand $ rawExecute "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\"" [] + migrateModels entities + +wipe :: IO () +wipe = runConnAssert $ do + rawExecute "DROP TABLE with_def_uuid;" [] + runMigration implicitUuidMigrate + +itDb :: String -> SqlPersistT (LoggingT (ResourceT IO)) a -> SpecWith (Arg (IO ())) +itDb msg action = it msg $ runConnAssert $ void action + +pass :: IO () +pass = pure () + +spec :: Spec +spec = describe "ImplicitUuidSpec" $ before_ wipe $ do + describe "WithDefUuidKey" $ do + it "works on UUIDs" $ do + let withDefUuidKey = WithDefUuidKey (UUID "Hello") + pass + describe "getEntityId" $ do + let Just idField = getEntityIdField (entityDef (Proxy @WithDefUuid)) + it "has a UUID SqlType" $ asIO $ do + fieldSqlType idField `shouldBe` SqlOther "UUID" + it "is an implicit ID column" $ asIO $ do + fieldIsImplicitIdColumn idField `shouldBe` True + + describe "insert" $ do + itDb "successfully has a default" $ 
do + let matt = WithDefUuid + { withDefUuidName = + "Matt" + } + k <- insert matt + mrec <- get k + mrec `shouldBe` Just matt diff --git a/persistent-postgresql/test/PgInit.hs b/persistent-postgresql/test/PgInit.hs index 5cc14c55d..dec295ad7 100644 --- a/persistent-postgresql/test/PgInit.hs +++ b/persistent-postgresql/test/PgInit.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} {-# OPTIONS_GHC -fno-warn-orphans #-} @@ -18,6 +20,7 @@ module PgInit , module Control.Monad.Trans.Reader , module Control.Monad , module Database.Persist.Sql + , module Database.Persist.SqlBackend , module Database.Persist , module Database.Persist.Sql.Raw.QQ , module Init @@ -27,12 +30,16 @@ module PgInit , BS.ByteString , Int32, Int64 , liftIO - , mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase + , mkPersist, migrateModels, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase + , mkEntityDefList + , setImplicitIdDef , SomeException , Text , TestFn(..) , LoggingT , ResourceT + , UUID(..) + , sqlSettingsUuid ) where import Init @@ -53,37 +60,54 @@ import Init , (==@) , (@/=) , (@==) + , UUID(..) + , sqlSettingsUuid ) -- re-exports import Control.Exception (SomeException) import Control.Monad (forM_, liftM, replicateM, void, when) import Control.Monad.Trans.Reader -import Data.Aeson (Value(..)) +import Data.Aeson (ToJSON, FromJSON, Value(..)) import Database.Persist.Postgresql.JSON () import Database.Persist.Sql.Raw.QQ +import Database.Persist.SqlBackend import Database.Persist.TH ( MkPersistSettings(..) 
, mkMigrate + , migrateModels , mkPersist , persistLowerCase , persistUpperCase , share , sqlSettings + , setImplicitIdDef + , mkEntityDefList ) import Test.Hspec - (Spec, afterAll_, before, beforeAll, describe, fdescribe, fit, it, - before_, SpecWith, Arg, hspec) + ( Arg + , Spec + , SpecWith + , afterAll_ + , before + , beforeAll + , before_ + , describe + , fdescribe + , fit + , hspec + , it + ) import Test.Hspec.Expectations.Lifted import Test.QuickCheck.Instances () import UnliftIO +import qualified Data.Text.Encoding as TE -- testing import Test.HUnit (Assertion, assertBool, assertFailure, (@=?), (@?=)) import Test.QuickCheck import Control.Monad (unless, (>=>)) - import Control.Monad.IO.Unlift (MonadUnliftIO) import Control.Monad.Logger import Control.Monad.Trans.Resource (ResourceT, runResourceT) diff --git a/persistent-postgresql/test/main.hs b/persistent-postgresql/test/main.hs index 60543a349..ecd91a77b 100644 --- a/persistent-postgresql/test/main.hs +++ b/persistent-postgresql/test/main.hs @@ -20,6 +20,7 @@ import qualified Data.Text as T import Data.Time import Test.QuickCheck +import qualified ImplicitUuidSpec import qualified ArrayAggTest import qualified CompositeTest import qualified ForeignKey @@ -130,74 +131,76 @@ main = do , MigrationTest.migrationMigrate , PgIntervalTest.pgIntervalMigrate , UpsertWhere.upsertWhereMigrate + , ImplicitUuidSpec.implicitUuidMigrate ] PersistentTest.cleanDB ForeignKey.cleanDB hspec $ do - RenameTest.specsWith runConnAssert - DataTypeTest.specsWith runConnAssert - (Just (runMigrationSilent dataTypeMigrate)) - [ TestFn "text" dataTypeTableText - , TestFn "textMaxLen" dataTypeTableTextMaxLen - , TestFn "bytes" dataTypeTableBytes - , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple - , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen - , TestFn "int" dataTypeTableInt - , TestFn "intList" dataTypeTableIntList - , TestFn "intMap" dataTypeTableIntMap - , TestFn "bool" dataTypeTableBool - , TestFn "day" dataTypeTableDay - , 
TestFn "time" (DataTypeTest.roundTime . dataTypeTableTime) - , TestFn "utc" (DataTypeTest.roundUTCTime . dataTypeTableUtc) - , TestFn "jsonb" dataTypeTableJsonb - ] - [ ("pico", dataTypeTablePico) ] - dataTypeTableDouble - HtmlTest.specsWith - runConnAssert - (Just (runMigrationSilent HtmlTest.htmlMigrate)) + ImplicitUuidSpec.spec + RenameTest.specsWith runConnAssert + DataTypeTest.specsWith runConnAssert + (Just (runMigrationSilent dataTypeMigrate)) + [ TestFn "text" dataTypeTableText + , TestFn "textMaxLen" dataTypeTableTextMaxLen + , TestFn "bytes" dataTypeTableBytes + , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple + , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen + , TestFn "int" dataTypeTableInt + , TestFn "intList" dataTypeTableIntList + , TestFn "intMap" dataTypeTableIntMap + , TestFn "bool" dataTypeTableBool + , TestFn "day" dataTypeTableDay + , TestFn "time" (DataTypeTest.roundTime . dataTypeTableTime) + , TestFn "utc" (DataTypeTest.roundUTCTime . dataTypeTableUtc) + , TestFn "jsonb" dataTypeTableJsonb + ] + [ ("pico", dataTypeTablePico) ] + dataTypeTableDouble + HtmlTest.specsWith + runConnAssert + (Just (runMigrationSilent HtmlTest.htmlMigrate)) - EmbedTest.specsWith runConnAssert - EmbedOrderTest.specsWith runConnAssert - LargeNumberTest.specsWith runConnAssert - ForeignKey.specsWith runConnAssert - UniqueTest.specsWith runConnAssert - MaxLenTest.specsWith runConnAssert - Recursive.specsWith runConnAssert - SumTypeTest.specsWith runConnAssert (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) - MigrationTest.specsWith runConnAssert - MigrationOnlyTest.specsWith runConnAssert + EmbedTest.specsWith runConnAssert + EmbedOrderTest.specsWith runConnAssert + LargeNumberTest.specsWith runConnAssert + ForeignKey.specsWith runConnAssert + UniqueTest.specsWith runConnAssert + MaxLenTest.specsWith runConnAssert + Recursive.specsWith runConnAssert + SumTypeTest.specsWith runConnAssert (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) + 
MigrationTest.specsWith runConnAssert + MigrationOnlyTest.specsWith runConnAssert - (Just - $ runMigrationSilent MigrationOnlyTest.migrateAll1 - >> runMigrationSilent MigrationOnlyTest.migrateAll2 - ) - PersistentTest.specsWith runConnAssert - ReadWriteTest.specsWith runConnAssert - PersistentTest.filterOrSpecs runConnAssert - RawSqlTest.specsWith runConnAssert - UpsertTest.specsWith - runConnAssert - UpsertTest.Don'tUpdateNull - UpsertTest.UpsertPreserveOldKey + (Just + $ runMigrationSilent MigrationOnlyTest.migrateAll1 + >> runMigrationSilent MigrationOnlyTest.migrateAll2 + ) + PersistentTest.specsWith runConnAssert + ReadWriteTest.specsWith runConnAssert + PersistentTest.filterOrSpecs runConnAssert + RawSqlTest.specsWith runConnAssert + UpsertTest.specsWith + runConnAssert + UpsertTest.Don'tUpdateNull + UpsertTest.UpsertPreserveOldKey - MpsNoPrefixTest.specsWith runConnAssert - MpsCustomPrefixTest.specsWith runConnAssert - EmptyEntityTest.specsWith runConnAssert (Just (runMigrationSilent EmptyEntityTest.migration)) - CompositeTest.specsWith runConnAssert - TreeTest.specsWith runConnAssert - PersistUniqueTest.specsWith runConnAssert - PrimaryTest.specsWith runConnAssert - CustomPersistFieldTest.specsWith runConnAssert - CustomPrimaryKeyReferenceTest.specsWith runConnAssert - MigrationColumnLengthTest.specsWith runConnAssert - EquivalentTypeTestPostgres.specs - TransactionLevelTest.specsWith runConnAssert - LongIdentifierTest.specsWith runConnAssertUseConf -- Have at least one test use the conf variant of connecting to Postgres, to improve test coverage. 
- JSONTest.specs - CustomConstraintTest.specs - UpsertWhere.specs - PgIntervalTest.specs - ArrayAggTest.specs - GeneratedColumnTestSQL.specsWith runConnAssert + MpsNoPrefixTest.specsWith runConnAssert + MpsCustomPrefixTest.specsWith runConnAssert + EmptyEntityTest.specsWith runConnAssert (Just (runMigrationSilent EmptyEntityTest.migration)) + CompositeTest.specsWith runConnAssert + TreeTest.specsWith runConnAssert + PersistUniqueTest.specsWith runConnAssert + PrimaryTest.specsWith runConnAssert + CustomPersistFieldTest.specsWith runConnAssert + CustomPrimaryKeyReferenceTest.specsWith runConnAssert + MigrationColumnLengthTest.specsWith runConnAssert + EquivalentTypeTestPostgres.specs + TransactionLevelTest.specsWith runConnAssert + LongIdentifierTest.specsWith runConnAssertUseConf -- Have at least one test use the conf variant of connecting to Postgres, to improve test coverage. + JSONTest.specs + CustomConstraintTest.specs + UpsertWhere.specs + PgIntervalTest.specs + ArrayAggTest.specs + GeneratedColumnTestSQL.specsWith runConnAssert diff --git a/persistent-qq/test/PersistentTestModels.hs b/persistent-qq/test/PersistentTestModels.hs index 30216c6a2..8defbbd6d 100644 --- a/persistent-qq/test/PersistentTestModels.hs +++ b/persistent-qq/test/PersistentTestModels.hs @@ -18,10 +18,12 @@ import Data.Aeson import Data.Text (Text) import Data.Proxy +import qualified Data.List.NonEmpty as NEL import Database.Persist.Sql import Database.Persist.TH import PersistTestPetType import PersistTestPetCollarType +import Data.Foldable (toList) share [ mkPersist sqlSettings { mpsGeneric = True } @@ -144,7 +146,7 @@ instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where entityDef = revFields . entityDef . 
unRfoProxy where - revFields ed = ed { entityFields = reverse (entityFields ed) } + revFields = overEntityFields reverse unRfoProxy :: proxy (ReverseFieldOrder a) -> Proxy a unRfoProxy _ = Proxy @@ -154,7 +156,7 @@ instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where fromPersistValues = fmap RFO . fromPersistValues . reverse newtype Unique (ReverseFieldOrder a) = URFO {unURFO :: Unique a } - persistUniqueToFieldNames = reverse . persistUniqueToFieldNames . unURFO + persistUniqueToFieldNames = NEL.reverse . persistUniqueToFieldNames . unURFO persistUniqueToValues = reverse . persistUniqueToValues . unURFO persistUniqueKeys = map URFO . reverse . persistUniqueKeys . unRFO diff --git a/persistent-redis/Database/Persist/Redis/Internal.hs b/persistent-redis/Database/Persist/Redis/Internal.hs index ce0c83c1e..8f4ab66d4 100644 --- a/persistent-redis/Database/Persist/Redis/Internal.hs +++ b/persistent-redis/Database/Persist/Redis/Internal.hs @@ -14,6 +14,7 @@ import Data.Text (Text, unpack) import qualified Data.Text as T import Control.Monad.Fail (MonadFail) +import Database.Persist.EntityDef.Internal import Database.Persist.Class import Database.Persist.Types import Database.Persist.Redis.Parser diff --git a/persistent-sqlite/ChangeLog.md b/persistent-sqlite/ChangeLog.md index 9aa454e68..7a8f0a71e 100644 --- a/persistent-sqlite/ChangeLog.md +++ b/persistent-sqlite/ChangeLog.md @@ -1,5 +1,10 @@ # Changelog for persistent-sqlite +## 2.13.0.0 (unreleased) + +* [#1225](https://github.com/yesodweb/persistent/pull/1225) + * Support `persistent-2.13` changes for SqlBackend being made internal. + ## 2.12.0.0 * Decomposed `HaskellName` into `ConstraintNameHS`, `EntityNameHS`, `FieldNameHS`. Decomposed `DBName` into `ConstraintNameDB`, `EntityNameDB`, `FieldNameDB` respectively. 
[#1174](https://github.com/yesodweb/persistent/pull/1174) diff --git a/persistent-sqlite/Database/Persist/Sqlite.hs b/persistent-sqlite/Database/Persist/Sqlite.hs index 8b2cd8c51..f6376496a 100644 --- a/persistent-sqlite/Database/Persist/Sqlite.hs +++ b/persistent-sqlite/Database/Persist/Sqlite.hs @@ -11,12 +11,14 @@ {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE TypeFamilies #-} + -- Strictly, this could go as low as GHC 8.6.1, which is when DerivingVia was -- introduced - this base version requires 8.6.5+ #if MIN_VERSION_base(4,12,0) {-# LANGUAGE DerivingVia #-} {-# LANGUAGE UndecidableInstances #-} #endif + -- | A sqlite backend for persistent. -- -- Note: If you prepend @WAL=off @ to your connection string, it will disable @@ -78,18 +80,19 @@ import qualified Data.HashMap.Lazy as HashMap import Data.Int (Int64) import Data.IORef import qualified Data.Map as Map -import Data.Monoid ((<>)) import Data.Pool (Pool) import Data.Text (Text) import qualified Data.Text as T import qualified Data.Text.IO as TIO import Lens.Micro.TH (makeLenses) import UnliftIO.Resource (ResourceT, runResourceT) +import Data.Foldable (toList) #if MIN_VERSION_base(4,12,0) import Database.Persist.Compatible #endif import Database.Persist.Sql +import Database.Persist.SqlBackend import qualified Database.Persist.Sql.Util as Util import qualified Database.Sqlite as Sqlite @@ -267,28 +270,27 @@ wrapConnectionInfo connInfo conn logFunc = do Sqlite.finalize stmt smap <- newIORef $ Map.empty - return $ SqlBackend - { connPrepare = prepare' conn - , connStmtMap = smap - , connInsertSql = insertSql' - , connUpsertSql = Nothing - , connPutManySql = Just putManySql - , connInsertManySql = Nothing - , connClose = Sqlite.close conn - , connMigrateSql = migrate' - , connBegin = \f _ -> helper "BEGIN" f - , connCommit = helper "COMMIT" - , connRollback = ignoreExceptions . helper "ROLLBACK" - , connEscapeFieldName = escape . unFieldNameDB - , connEscapeTableName = escape . 
unEntityNameDB . entityDB - , connEscapeRawName = escape - , connNoLimit = "LIMIT -1" - , connRDBMS = "sqlite" - , connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1" - , connLogFunc = logFunc - , connMaxParams = Just 999 - , connRepsertManySql = Just repsertManySql - } + return $ + setConnMaxParams 999 $ + setConnPutManySql putManySql $ + setConnRepsertManySql repsertManySql $ + mkSqlBackend MkSqlBackendArgs + { connPrepare = prepare' conn + , connStmtMap = smap + , connInsertSql = insertSql' + , connClose = Sqlite.close conn + , connMigrateSql = migrate' + , connBegin = \f _ -> helper "BEGIN" f + , connCommit = helper "COMMIT" + , connRollback = ignoreExceptions . helper "ROLLBACK" + , connEscapeFieldName = escape . unFieldNameDB + , connEscapeTableName = escape . unEntityNameDB . getEntityDBName + , connEscapeRawName = escape + , connNoLimit = "LIMIT -1" + , connRDBMS = "sqlite" + , connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1" + , connLogFunc = logFunc + } where helper t getter = do stmt <- getter t @@ -336,31 +338,31 @@ prepare' conn sql = do insertSql' :: EntityDef -> [PersistValue] -> InsertSqlResult insertSql' ent vals = - case entityPrimary ent of - Just _ -> + case getEntityId ent of + EntityIdNaturalKey _ -> ISRManyKeys sql vals where sql = T.concat [ "INSERT INTO " - , escapeE $ entityDB ent + , escapeE $ getEntityDBName ent , "(" , T.intercalate "," $ map (escapeF . fieldDB) cols , ") VALUES(" , T.intercalate "," (map (const "?") cols) , ")" ] - Nothing -> + EntityIdField fd -> ISRInsertGet ins sel where sel = T.concat [ "SELECT " - , escapeF $ fieldDB (entityId ent) + , escapeF $ fieldDB fd , " FROM " - , escapeE $ entityDB ent + , escapeE $ getEntityDBName ent , " WHERE _ROWID_=last_insert_rowid()" ] ins = T.concat [ "INSERT INTO " - , escapeE $ entityDB ent + , escapeE $ getEntityDBName ent , if null cols then " VALUES(null)" else T.concat @@ -375,7 +377,7 @@ insertSql' ent vals = notGenerated = isNothing . 
fieldGenerated cols = - filter notGenerated $ entityFields ent + filter notGenerated $ getEntityFields ent execute' :: Sqlite.Connection -> Sqlite.Statement -> [PersistValue] -> IO Int64 execute' conn stmt vals = flip finally (liftIO $ Sqlite.reset conn stmt) $ do @@ -441,7 +443,7 @@ migrate' allDefs getter val = do return $ Right sql where def = val - table = entityDB def + table = getEntityDBName def go = do x <- CL.head case x of @@ -454,44 +456,42 @@ migrate' allDefs getter val = do -- with the difference that an actual database isn't needed for it. mockMigration :: Migration -> IO () mockMigration mig = do - smap <- newIORef $ Map.empty - let sqlbackend = SqlBackend - { connPrepare = \_ -> do - return Statement - { stmtFinalize = return () - , stmtReset = return () - , stmtExecute = undefined - , stmtQuery = \_ -> return $ return () - } - , connStmtMap = smap - , connInsertSql = insertSql' - , connInsertManySql = Nothing - , connClose = undefined - , connMigrateSql = migrate' - , connBegin = \f _ -> helper "BEGIN" f - , connCommit = helper "COMMIT" - , connRollback = ignoreExceptions . helper "ROLLBACK" - , connEscapeFieldName = escape . unFieldNameDB - , connEscapeTableName = escape . unEntityNameDB . entityDB - , connEscapeRawName = escape - , connNoLimit = "LIMIT -1" - , connRDBMS = "sqlite" - , connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1" - , connLogFunc = undefined - , connUpsertSql = undefined - , connPutManySql = undefined - , connMaxParams = Just 999 - , connRepsertManySql = Nothing - } - result = runReaderT . runWriterT . 
runWriterT $ mig - resp <- result sqlbackend - mapM_ TIO.putStrLn $ map snd $ snd resp - where - helper t getter = do - stmt <- getter t - _ <- stmtExecute stmt [] - stmtReset stmt - ignoreExceptions = E.handle (\(_ :: E.SomeException) -> return ()) + smap <- newIORef $ Map.empty + let sqlbackend = + setConnMaxParams 999 $ + mkSqlBackend MkSqlBackendArgs + { connPrepare = \_ -> do + return Statement + { stmtFinalize = return () + , stmtReset = return () + , stmtExecute = undefined + , stmtQuery = \_ -> return $ return () + } + , connStmtMap = smap + , connInsertSql = insertSql' + , connClose = undefined + , connMigrateSql = migrate' + , connBegin = \f _ -> helper "BEGIN" f + , connCommit = helper "COMMIT" + , connRollback = ignoreExceptions . helper "ROLLBACK" + , connEscapeFieldName = escape . unFieldNameDB + , connEscapeTableName = escape . unEntityNameDB . getEntityDBName + , connEscapeRawName = escape + , connNoLimit = "LIMIT -1" + , connRDBMS = "sqlite" + , connLimitOffset = decorateSQLWithLimitOffset "LIMIT -1" + , connLogFunc = undefined + } + result = runReaderT . runWriterT . runWriterT $ mig + resp <- result sqlbackend + mapM_ TIO.putStrLn $ map snd $ snd resp + where + helper t getter = do + stmt <- getter t + _ <- stmtExecute stmt [] + stmtReset stmt + ignoreExceptions = + E.handle (\(_ :: E.SomeException) -> return ()) -- | Check if a column name is listed as the "safe to remove" in the entity -- list. @@ -499,7 +499,7 @@ safeToRemove :: EntityDef -> FieldNameDB -> Bool safeToRemove def (FieldNameDB colName) = any (elem FieldAttrSafeToRemove . fieldAttrs) $ filter ((== FieldNameDB colName) . 
fieldDB) - $ entityFields def + $ getEntityFieldsDatabase def getCopyTable :: [EntityDef] -> (Text -> IO Statement) @@ -527,12 +527,12 @@ getCopyTable allDefs getter def = do names <- getCols return $ name : names Just y -> error $ "Invalid result from PRAGMA table_info: " ++ show y - table = entityDB def + table = getEntityDBName def tableTmp = EntityNameDB $ unEntityNameDB table <> "_backup" (cols, uniqs, fdef) = sqliteMkColumns allDefs def cols' = filter (not . safeToRemove def . cName) cols newSql = mkCreateTable False def (cols', uniqs, fdef) - tmpSql = mkCreateTable True def { entityDB = tableTmp } (cols', uniqs, []) + tmpSql = mkCreateTable True (setEntityDBName tableTmp def) (cols', uniqs, []) dropTmp = "DROP TABLE " <> escapeE tableTmp dropOld = "DROP TABLE " <> escapeE table copyToTemp common = T.concat @@ -562,7 +562,7 @@ mkCreateTable isTemp entity (cols, uniqs, fdefs) = [ "CREATE" , if isTemp then " TEMP" else "" , " TABLE " - , escapeE $ entityDB entity + , escapeE $ getEntityDBName entity , "(" ] @@ -572,25 +572,25 @@ mkCreateTable isTemp entity (cols, uniqs, fdefs) = , ")" ] - columns = case entityPrimary entity of - Just pdef -> + columns = case getEntityId entity of + EntityIdNaturalKey pdef -> [ T.drop 1 $ T.concat $ map (sqlColumn isTemp) cols , ", PRIMARY KEY " , "(" - , T.intercalate "," $ map (escapeF . fieldDB) $ compositeFields pdef + , T.intercalate "," $ map (escapeF . 
fieldDB) $ toList $ compositeFields pdef , ")" ] - Nothing -> - [ escapeF $ fieldDB (entityId entity) + EntityIdField fd -> + [ escapeF $ fieldDB fd , " " - , showSqlType $ fieldSqlType $ entityId entity + , showSqlType $ fieldSqlType fd , " PRIMARY KEY" - , mayDefault $ defaultAttribute $ fieldAttrs $ entityId entity + , mayDefault $ defaultAttribute $ fieldAttrs fd , T.concat $ map (sqlColumn isTemp) nonIdCols ] - nonIdCols = filter (\c -> cName c /= fieldDB (entityId entity)) cols + nonIdCols = filter (\c -> Just (cName c) /= fmap fieldDB (getEntityIdField entity)) cols mayDefault :: Maybe Text -> Text mayDefault def = case def of @@ -652,7 +652,7 @@ sqlUnique (UniqueDef _ cname cols _) = T.concat [ ",CONSTRAINT " , escapeC cname , " UNIQUE (" - , T.intercalate "," $ map (escapeF . snd) cols + , T.intercalate "," $ map (escapeF . snd) $ toList cols , ")" ] @@ -674,16 +674,16 @@ escape s = go c = T.singleton c putManySql :: EntityDef -> Int -> Text -putManySql ent n = putManySql' conflictColumns fields ent n +putManySql ent n = putManySql' conflictColumns (toList fields) ent n where - fields = entityFields ent - conflictColumns = concatMap (map (escapeF . snd) . uniqueFields) (entityUniques ent) + fields = getEntityFieldsDatabase ent + conflictColumns = concatMap (map (escapeF . snd) . toList . uniqueFields) (getEntityUniques ent) repsertManySql :: EntityDef -> Int -> Text -repsertManySql ent n = putManySql' conflictColumns fields ent n +repsertManySql ent n = putManySql' conflictColumns (toList fields) ent n where fields = keyAndEntityFields ent - conflictColumns = escapeF . fieldDB <$> entityKeyFields ent + conflictColumns = escapeF . fieldDB <$> toList (getEntityKeyFields ent) putManySql' :: [Text] -> [FieldDef] -> EntityDef -> Int -> Text putManySql' conflictColumns fields ent n = q @@ -691,7 +691,7 @@ putManySql' conflictColumns fields ent n = q fieldDbToText = escapeF . fieldDB mkAssignment f = T.concat [f, "=EXCLUDED.", f] - table = escapeE . 
entityDB $ ent + table = escapeE . getEntityDBName $ ent columns = Util.commaSeparated $ map fieldDbToText fields placeholders = map (const "?") fields updates = map (mkAssignment . fieldDbToText) fields diff --git a/persistent-sqlite/persistent-sqlite.cabal b/persistent-sqlite/persistent-sqlite.cabal index 81c0ab452..41728af7f 100644 --- a/persistent-sqlite/persistent-sqlite.cabal +++ b/persistent-sqlite/persistent-sqlite.cabal @@ -1,5 +1,5 @@ name: persistent-sqlite -version: 2.12.0.0 +version: 2.13.0.0 license: MIT license-file: LICENSE author: Michael Snoyman @@ -44,7 +44,7 @@ flag use-stat4 library build-depends: base >= 4.9 && < 5 - , persistent >= 2.12 && < 3 + , persistent >= 2.13 && < 3 , aeson >= 1.0 , bytestring >= 0.10 , conduit >= 1.2.12 @@ -114,7 +114,9 @@ test-suite test type: exitcode-stdio-1.0 main-is: main.hs hs-source-dirs: test - other-modules: SqliteInit + other-modules: + SqliteInit + Database.Persist.Sqlite.CompositeSpec ghc-options: -Wall build-depends: base >= 4.9 && < 5 diff --git a/persistent-sqlite/test/Database/Persist/Sqlite/CompositeSpec.hs b/persistent-sqlite/test/Database/Persist/Sqlite/CompositeSpec.hs new file mode 100644 index 000000000..e110de7c1 --- /dev/null +++ b/persistent-sqlite/test/Database/Persist/Sqlite/CompositeSpec.hs @@ -0,0 +1,94 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE DerivingStrategies #-} +{-# OPTIONS_GHC -fno-warn-unused-top-binds #-} + +module Database.Persist.Sqlite.CompositeSpec where + +import SqliteInit + +import Control.Monad.Reader 
(MonadReader) +import Control.Monad.Trans.Resource (MonadResource) +import qualified Data.Conduit.List as CL +import Conduit +import Database.Persist.Sqlite +import System.IO (hClose) +import Control.Exception (handle, IOException, throwIO) +import System.IO.Temp (withSystemTempFile) +import qualified Data.Text as T +import qualified Lens.Micro as Lens + +share [mkPersist sqlSettings, mkMigrate "compositeSetup"] [persistLowerCase| +SimpleComposite + int Int + text Text + Primary text int + deriving Show Eq + +SimpleCompositeReference + int Int + text Text + label Text + Foreign SimpleComposite fk_simple_composite text int + deriving Show Eq +|] + +share [mkPersist sqlSettings, mkMigrate "compositeMigrateTest"] [persistLowerCase| +SimpleComposite2 sql=simple_composite + int Int + text Text + new Int default=0 + Primary text int + deriving Show Eq + +SimpleCompositeReference2 sql=simple_composite_reference + int Int + text Text + label Text + Foreign SimpleComposite2 fk_simple_composite text int + deriving Show Eq +|] + +spec :: Spec +spec = describe "CompositeSpec" $ do + it "properly migrates to a composite primary key (issue #669)" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do + void $ runMigrationSilent compositeSetup + void $ runMigrationSilent compositeMigrateTest + pure () + it "test migrating sparse composite primary keys (issue #1184)" $ asIO $ withSystemTempFile "test564.sqlite3"$ \fp h -> do + hClose h + let connInfo = Lens.set fkEnabled False $ mkSqliteConnectionInfo (T.pack fp) + + runSqliteInfo connInfo $ do + void $ runMigrationSilent compositeSetup + forM_ [(1,"foo"),(3,"bar")] $ \(intKey, strKey) -> do + let key = SimpleCompositeKey strKey intKey + insertKey key (SimpleComposite intKey strKey) + insert (SimpleCompositeReference intKey strKey "test") + + validateForeignKeys + + runSqliteInfo connInfo $ do + void $ runMigrationSilent compositeMigrateTest + validateForeignKeys + + +validateForeignKeys + :: (MonadResource m, MonadReader 
env m, BackendCompatible SqlBackend env) + => m () +validateForeignKeys = do + violations <- map (T.pack . show) <$> runConduit (checkForeignKeys .| CL.consume) + unless (null violations) . liftIO . throwIO $ + PersistForeignConstraintUnmet (T.unlines violations) diff --git a/persistent-sqlite/test/SqliteInit.hs b/persistent-sqlite/test/SqliteInit.hs index 47595f80e..2c54ec8bd 100644 --- a/persistent-sqlite/test/SqliteInit.hs +++ b/persistent-sqlite/test/SqliteInit.hs @@ -84,21 +84,24 @@ _debugOn = False persistSettings :: MkPersistSettings persistSettings = sqlSettings { mpsGeneric = True } + type BackendMonad = SqlBackend sqlite_database_file :: Text sqlite_database_file = "testdb.sqlite3" + sqlite_database :: SqliteConnectionInfo sqlite_database = mkSqliteConnectionInfo sqlite_database_file + runConn :: MonadUnliftIO m => SqlPersistT (LoggingT m) t -> m () runConn f = do - travis <- liftIO isTravis - let debugPrint = not travis && _debugOn - let printDebug = if debugPrint then print . fromLogStr else void . return - flip runLoggingT (\_ _ _ s -> printDebug s) $ do - _<-withSqlitePoolInfo sqlite_database 1 $ runSqlPool f - return () + travis <- liftIO isTravis + let debugPrint = not travis && _debugOn + let printDebug = if debugPrint then print . fromLogStr else void . 
return + void $ flip runLoggingT (\_ _ _ s -> printDebug s) $ do + withSqlitePoolInfo sqlite_database 1 $ runSqlPool f db :: SqlPersistT (LoggingT (ResourceT IO)) () -> Assertion db actions = do - runResourceT $ runConn $ actions >> transactionUndo + runResourceT $ runConn $ actions >> transactionUndo + diff --git a/persistent-sqlite/test/main.hs b/persistent-sqlite/test/main.hs index dcfe7dd5b..77643a584 100644 --- a/persistent-sqlite/test/main.hs +++ b/persistent-sqlite/test/main.hs @@ -70,6 +70,7 @@ import Database.Persist.Sqlite import qualified Database.Sqlite as Sqlite import PersistentTestModels +import qualified Database.Persist.Sqlite.CompositeSpec as CompositeSpec import qualified MigrationTest type Tuple = (,) @@ -93,37 +94,6 @@ DataTypeTable no-json utc UTCTime |] -share [mkPersist sqlSettings, mkMigrate "compositeSetup"] [persistLowerCase| -SimpleComposite - int Int - text Text - Primary text int - deriving Show Eq - -SimpleCompositeReference - int Int - text Text - label Text - Foreign SimpleComposite fk_simple_composite text int - deriving Show Eq -|] - -share [mkPersist sqlSettings, mkMigrate "compositeMigrateTest"] [persistLowerCase| -SimpleComposite2 sql=simple_composite - int Int - text Text - new Int default=0 - Primary text int - deriving Show Eq - -SimpleCompositeReference2 sql=simple_composite_reference - int Int - text Text - label Text - Foreign SimpleComposite2 fk_simple_composite text int - deriving Show Eq -|] - share [mkPersist sqlSettings, mkMigrate "idSetup"] [persistLowerCase| Simple text Text @@ -176,165 +146,123 @@ setup migration = do main :: IO () main = do - handle (\(_ :: IOException) -> return ()) - $ removeFile $ fromText sqlite_database_file - - runConn $ do - mapM_ setup - [ ForeignKey.compositeMigrate - , PersistentTest.testMigrate - , PersistentTest.noPrefixMigrate - , PersistentTest.customPrefixMigrate - , EmbedTest.embedMigrate - , EmbedOrderTest.embedOrderMigrate - , LargeNumberTest.numberMigrate - , 
UniqueTest.uniqueMigrate - , MaxLenTest.maxlenMigrate - , Recursive.recursiveMigrate - , CompositeTest.compositeMigrate - , MigrationTest.migrationMigrate - , PersistUniqueTest.migration - , RenameTest.migration - , CustomPersistFieldTest.customFieldMigrate - , PrimaryTest.migration - , CustomPrimaryKeyReferenceTest.migration - , MigrationColumnLengthTest.migration - , TransactionLevelTest.migration - , LongIdentifierTest.migration - ] - PersistentTest.cleanDB - ForeignKey.cleanDB - - hspec $ do - RenameTest.specsWith db - DataTypeTest.specsWith - db - (Just (runMigrationSilent dataTypeMigrate)) - [ TestFn "text" dataTypeTableText - , TestFn "textMaxLen" dataTypeTableTextMaxLen - , TestFn "bytes" dataTypeTableBytes - , TestFn "bytesTextTuple" dataTypeTableBytesTextTuple - , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen - , TestFn "int" dataTypeTableInt - , TestFn "intList" dataTypeTableIntList - , TestFn "intMap" dataTypeTableIntMap - , TestFn "bool" dataTypeTableBool - , TestFn "day" dataTypeTableDay - , TestFn "time" (DataTypeTest.roundTime . dataTypeTableTime) - , TestFn "utc" (DataTypeTest.roundUTCTime . 
dataTypeTableUtc) - ] - [ ("pico", dataTypeTablePico) ] - dataTypeTableDouble - HtmlTest.specsWith - db - (Just (runMigrationSilent HtmlTest.htmlMigrate)) - EmbedTest.specsWith db - EmbedOrderTest.specsWith db - LargeNumberTest.specsWith db - UniqueTest.specsWith db - MaxLenTest.specsWith db - Recursive.specsWith db - SumTypeTest.specsWith db (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) - MigrationOnlyTest.specsWith db - (Just - $ runMigrationSilent MigrationOnlyTest.migrateAll1 - >> runMigrationSilent MigrationOnlyTest.migrateAll2 - ) - PersistentTest.specsWith db - PersistentTest.filterOrSpecs db - ReadWriteTest.specsWith db - RawSqlTest.specsWith db - UpsertTest.specsWith - db - UpsertTest.Don'tUpdateNull - UpsertTest.UpsertPreserveOldKey - - MpsNoPrefixTest.specsWith db - MpsCustomPrefixTest.specsWith db - EmptyEntityTest.specsWith db (Just (runMigrationSilent EmptyEntityTest.migration)) - CompositeTest.specsWith db - PersistUniqueTest.specsWith db - PrimaryTest.specsWith db - CustomPersistFieldTest.specsWith db - CustomPrimaryKeyReferenceTest.specsWith db - MigrationColumnLengthTest.specsWith db - EquivalentTypeTest.specsWith db - ForeignKey.specsWith db - TransactionLevelTest.specsWith db - MigrationTest.specsWith db - LongIdentifierTest.specsWith db - GeneratedColumnTestSQL.specsWith db - - it "issue #328" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do - void $ runMigrationSilent migrateAll - insert_ . 
Test $ read "2014-11-30 05:15:25.123Z" - [Single x] <- rawSql "select strftime('%s%f',time) from test" [] - liftIO $ x `shouldBe` Just ("141732452525.123" :: String) - it "issue #339" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do - void $ runMigrationSilent migrateAll - now <- liftIO getCurrentTime - tid <- insert $ Test now - Just (Test now') <- get tid - liftIO $ now' `shouldBe` now - it "issue #564" $ asIO $ withSystemTempFile "test564.sqlite3"$ \fp h -> do - hClose h - conn <- Sqlite.open (T.pack fp) - Sqlite.close conn - return () - it "issue #527" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do - void $ runMigrationSilent migrateAll - insertMany_ $ replicate 1000 (Test $ read "2014-11-30 05:15:25.123Z") - - it "properly migrates to a composite primary key (issue #669)" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do - void $ runMigrationSilent compositeSetup - void $ runMigrationSilent compositeMigrateTest - pure () - - it "test migrating sparse primary keys (issue #1184)" $ asIO $ withSystemTempFile "test564.sqlite3"$ \fp h -> do - hClose h - let connInfo = Lens.set fkEnabled False $ mkSqliteConnectionInfo (T.pack fp) - runSqliteInfo connInfo $ do - void $ runMigrationSilent idSetup - forM_ (map toSqlKey [1,3]) $ \key -> do - insertKey key (Simple "foo") - insert (SimpleReference key "test") - - validateForeignKeys - - runSqliteInfo connInfo $ do - void $ runMigrationSilent idMigrateTest - validateForeignKeys - - it "test migrating sparse composite primary keys (issue #1184)" $ asIO $ withSystemTempFile "test564.sqlite3"$ \fp h -> do - hClose h - let connInfo = Lens.set fkEnabled False $ mkSqliteConnectionInfo (T.pack fp) - - runSqliteInfo connInfo $ do - void $ runMigrationSilent compositeSetup - forM_ [(1,"foo"),(3,"bar")] $ \(intKey, strKey) -> do - let key = SimpleCompositeKey strKey intKey - insertKey key (SimpleComposite intKey strKey) - insert (SimpleCompositeReference intKey strKey "test") - - 
validateForeignKeys - - runSqliteInfo connInfo $ do - void $ runMigrationSilent compositeMigrateTest - validateForeignKeys - - it "afterException" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do - void $ runMigrationSilent testMigrate - let catcher :: forall m. Monad m => SomeException -> m () - catcher _ = return () - insert_ $ Person "A" 0 Nothing - insert_ (Person "A" 1 Nothing) `catch` catcher - insert_ $ Person "B" 0 Nothing - return () - -validateForeignKeys - :: (MonadResource m, MonadReader env m, BackendCompatible SqlBackend env) - => m () -validateForeignKeys = do - violations <- map (T.pack . show) <$> runConduit (checkForeignKeys .| CL.consume) - unless (null violations) . liftIO . throwIO $ - PersistForeignConstraintUnmet (T.unlines violations) + handle (\(_ :: IOException) -> return ()) + $ removeFile $ fromText sqlite_database_file + + runConn $ do + mapM_ setup + [ ForeignKey.compositeMigrate + , PersistentTest.testMigrate + , PersistentTest.noPrefixMigrate + , PersistentTest.customPrefixMigrate + , EmbedTest.embedMigrate + , EmbedOrderTest.embedOrderMigrate + , LargeNumberTest.numberMigrate + , UniqueTest.uniqueMigrate + , MaxLenTest.maxlenMigrate + , Recursive.recursiveMigrate + , CompositeTest.compositeMigrate + , MigrationTest.migrationMigrate + , PersistUniqueTest.migration + , RenameTest.migration + , CustomPersistFieldTest.customFieldMigrate + , PrimaryTest.migration + , CustomPrimaryKeyReferenceTest.migration + , MigrationColumnLengthTest.migration + , TransactionLevelTest.migration + , LongIdentifierTest.migration + ] + PersistentTest.cleanDB + ForeignKey.cleanDB + + + hspec $ do + describe "Database" $ describe "Persist" $ describe "Sqlite" $ do + CompositeSpec.spec + RenameTest.specsWith db + DataTypeTest.specsWith + db + (Just (runMigrationSilent dataTypeMigrate)) + [ TestFn "text" dataTypeTableText + , TestFn "textMaxLen" dataTypeTableTextMaxLen + , TestFn "bytes" dataTypeTableBytes + , TestFn "bytesTextTuple" 
dataTypeTableBytesTextTuple + , TestFn "bytesMaxLen" dataTypeTableBytesMaxLen + , TestFn "int" dataTypeTableInt + , TestFn "intList" dataTypeTableIntList + , TestFn "intMap" dataTypeTableIntMap + , TestFn "bool" dataTypeTableBool + , TestFn "day" dataTypeTableDay + , TestFn "time" (DataTypeTest.roundTime . dataTypeTableTime) + , TestFn "utc" (DataTypeTest.roundUTCTime . dataTypeTableUtc) + ] + [ ("pico", dataTypeTablePico) ] + dataTypeTableDouble + HtmlTest.specsWith + db + (Just (runMigrationSilent HtmlTest.htmlMigrate)) + EmbedTest.specsWith db + EmbedOrderTest.specsWith db + LargeNumberTest.specsWith db + UniqueTest.specsWith db + MaxLenTest.specsWith db + Recursive.specsWith db + SumTypeTest.specsWith db (Just (runMigrationSilent SumTypeTest.sumTypeMigrate)) + MigrationOnlyTest.specsWith db + (Just + $ runMigrationSilent MigrationOnlyTest.migrateAll1 + >> runMigrationSilent MigrationOnlyTest.migrateAll2 + ) + PersistentTest.specsWith db + PersistentTest.filterOrSpecs db + ReadWriteTest.specsWith db + RawSqlTest.specsWith db + UpsertTest.specsWith + db + UpsertTest.Don'tUpdateNull + UpsertTest.UpsertPreserveOldKey + + MpsNoPrefixTest.specsWith db + MpsCustomPrefixTest.specsWith db + EmptyEntityTest.specsWith db (Just (runMigrationSilent EmptyEntityTest.migration)) + CompositeTest.specsWith db + PersistUniqueTest.specsWith db + PrimaryTest.specsWith db + CustomPersistFieldTest.specsWith db + CustomPrimaryKeyReferenceTest.specsWith db + MigrationColumnLengthTest.specsWith db + EquivalentTypeTest.specsWith db + ForeignKey.specsWith db + TransactionLevelTest.specsWith db + MigrationTest.specsWith db + LongIdentifierTest.specsWith db + GeneratedColumnTestSQL.specsWith db + + it "issue #328" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do + void $ runMigrationSilent migrateAll + insert_ . 
Test $ read "2014-11-30 05:15:25.123Z" + [Single x] <- rawSql "select strftime('%s%f',time) from test" [] + liftIO $ x `shouldBe` Just ("141732452525.123" :: String) + it "issue #339" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do + void $ runMigrationSilent migrateAll + now <- liftIO getCurrentTime + tid <- insert $ Test now + Just (Test now') <- get tid + liftIO $ now' `shouldBe` now + it "issue #564" $ asIO $ withSystemTempFile "test564.sqlite3"$ \fp h -> do + hClose h + conn <- Sqlite.open (T.pack fp) + Sqlite.close conn + return () + it "issue #527" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do + void $ runMigrationSilent migrateAll + insertMany_ $ replicate 1000 (Test $ read "2014-11-30 05:15:25.123Z") + + it "afterException" $ asIO $ runSqliteInfo (mkSqliteConnectionInfo ":memory:") $ do + void $ runMigrationSilent testMigrate + let catcher :: forall m. Monad m => SomeException -> m () + catcher _ = return () + insert_ $ Person "A" 0 Nothing + insert_ (Person "A" 1 Nothing) `catch` catcher + insert_ $ Person "B" 0 Nothing + return () diff --git a/persistent-test/ChangeLog.md b/persistent-test/ChangeLog.md index 11de8b423..35abcb958 100644 --- a/persistent-test/ChangeLog.md +++ b/persistent-test/ChangeLog.md @@ -1,5 +1,10 @@ ## Unreleased changes +## 2.13.0.0 (unreleased) + +* [#1225](https://github.com/yesodweb/persistent/pull/1225) + * Support `persistent-2.13` changes for SqlBackend being made internal. + ## 2.12.0.0 * Decomposed `HaskellName` into `ConstraintNameHS`, `EntityNameHS`, `FieldNameHS`. Decomposed `DBName` into `ConstraintNameDB`, `EntityNameDB`, `FieldNameDB` respectively. 
[#1174](https://github.com/yesodweb/persistent/pull/1174) diff --git a/persistent-test/persistent-test.cabal b/persistent-test/persistent-test.cabal index 31d232c8c..afcf75d7a 100644 --- a/persistent-test/persistent-test.cabal +++ b/persistent-test/persistent-test.cabal @@ -1,5 +1,5 @@ name: persistent-test -version: 2.12.0.0 +version: 2.13.0.0 license: MIT license-file: LICENSE author: Michael Snoyman @@ -60,7 +60,6 @@ library build-depends: base >= 4.9 && < 5 - , persistent >= 2.12 && < 2.13 , aeson >= 1.0 , blaze-html >= 0.9 , bytestring >= 0.10 @@ -69,11 +68,13 @@ library , exceptions >= 0.8 , hspec >= 2.4 , hspec-expectations + , http-api-data , HUnit , monad-control , monad-logger >= 0.3.25 , mtl , path-pieces >= 0.2 + , persistent >= 2.13 && < 2.14 , QuickCheck >= 2.9 , quickcheck-instances >= 0.3 , random >= 1.1 diff --git a/persistent-test/src/CompositeTest.hs b/persistent-test/src/CompositeTest.hs index af8a77787..2ec18f726 100644 --- a/persistent-test/src/CompositeTest.hs +++ b/persistent-test/src/CompositeTest.hs @@ -7,12 +7,11 @@ module CompositeTest where import qualified Data.Map as Map import Data.Maybe (isJust) -import Database.Persist.TH (mkDeleteCascade) import Init -- mpsGeneric = False is due to a bug or at least lack of a feature in mkKeyTypeDec TH.hs -share [mkPersist persistSettings { mpsGeneric = False }, mkMigrate "compositeMigrate", mkDeleteCascade persistSettings { mpsGeneric = False }] [persistLowerCase| +share [mkPersist persistSettings { mpsGeneric = False }, mkMigrate "compositeMigrate"] [persistLowerCase| TestParent name String maxlen=20 name2 String maxlen=20 @@ -233,6 +232,8 @@ specsWith runDb = describe "composite" $ it "RawSql Entity instance" $ runDb $ do key <- insert p1 + Just x <- get key + x @== p1 newp1 <- rawSql "SELECT ?? 
FROM test_parent LIMIT 1" [] [Entity key p1] @== newp1 diff --git a/persistent-test/src/ForeignKey.hs b/persistent-test/src/ForeignKey.hs index 863661478..fa1250604 100644 --- a/persistent-test/src/ForeignKey.hs +++ b/persistent-test/src/ForeignKey.hs @@ -8,6 +8,8 @@ import Data.Proxy import qualified Data.List as List import Init +import Database.Persist.EntityDef.Internal (entityExtra) + -- mpsGeneric = False is due to a bug or at least lack of a feature in mkKeyTypeDec TH.hs share [mkPersist persistSettings { mpsGeneric = False }, mkMigrate "compositeMigrate"] [persistLowerCase| SimpleCascadeChild @@ -204,7 +206,7 @@ specsWith runDb = describe "foreign keys options" $ do , fcOnDelete = Just Cascade } Just refField = - List.find isRefCol (entityFields ed) + List.find isRefCol (getEntityFields ed) it "parses into fieldCascade" $ do fieldCascade refField `shouldBe` expected diff --git a/persistent-test/src/GeneratedColumnTestSQL.hs b/persistent-test/src/GeneratedColumnTestSQL.hs index 0803dd1fd..2eac96d5a 100644 --- a/persistent-test/src/GeneratedColumnTestSQL.hs +++ b/persistent-test/src/GeneratedColumnTestSQL.hs @@ -6,7 +6,7 @@ module GeneratedColumnTestSQL (specsWith) where import Database.Persist.TH import Init -share [mkPersist sqlSettings, mkMigrate "migrate1", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings, mkMigrate "migrate1"] [persistLowerCase| GenTest sql=gen_test fieldOne Text Maybe fieldTwo Text Maybe @@ -18,7 +18,7 @@ MigrateTestV1 sql=gen_migrate_test cromulence Int generated=5 |] -share [mkPersist sqlSettings, mkMigrate "migrate2", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings, mkMigrate "migrate2"] [persistLowerCase| MigrateTestV2 sql=gen_migrate_test sickness Int generated=3 cromulence Int diff --git a/persistent-test/src/Init.hs b/persistent-test/src/Init.hs index 471be0a49..62bb4fc84 100644 --- a/persistent-test/src/Init.hs +++ b/persistent-test/src/Init.hs @@ -1,6 +1,8 @@ -{-# 
LANGUAGE ConstraintKinds #-} {-# LANGUAGE CPP #-} +{-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE TypeApplications #-} {-# LANGUAGE UndecidableInstances #-} {-# OPTIONS_GHC -fno-warn-orphans #-} @@ -42,14 +44,16 @@ module Init ( , liftA2 , changeBackend , Proxy(..) + , UUID(..) + , sqlSettingsUuid ) where #if !MIN_VERSION_monad_logger(0,3,30) -- Needed for GHC versions 7.10.3. Can drop when we drop support for GHC -- 7.10.3 +import qualified Control.Monad.Fail as MonadFail import Control.Monad.IO.Class import Control.Monad.Logger -import qualified Control.Monad.Fail as MonadFail #endif -- needed for backwards compatibility @@ -64,21 +68,35 @@ import Control.Monad.Trans.Resource.Internal -- re-exports import Control.Applicative (liftA2, (<|>)) import Control.Exception (SomeException) -import Control.Monad (void, replicateM, liftM, when, forM_) +import Control.Monad (forM_, liftM, replicateM, void, when) import Control.Monad.Fail (MonadFail) import Control.Monad.Reader -import Data.Char (generalCategory, GeneralCategory(..)) -import Data.Fixed (Pico,Micro) +import Data.Char (GeneralCategory(..), generalCategory) +import Data.Fixed (Micro, Pico) +import Data.Proxy import qualified Data.Text as T import Data.Time import Test.Hspec import Test.QuickCheck.Instances () -import Data.Proxy -import Database.Persist.TH (mkPersist, mkMigrate, share, sqlSettings, persistLowerCase, persistUpperCase, MkPersistSettings(..)) +import Data.Aeson (FromJSON, ToJSON, Value(..)) +import qualified Data.Text.Encoding as TE +import Database.Persist.ImplicitIdDef (mkImplicitIdDef) +import Database.Persist.TH + ( MkPersistSettings(..) 
+ , mkMigrate + , mkPersist + , persistLowerCase + , persistUpperCase + , setImplicitIdDef + , share + , sqlSettings + ) +import Web.Internal.HttpApiData +import Web.PathPieces -- testing -import Test.HUnit ((@?=),(@=?), Assertion, assertFailure, assertBool) +import Test.HUnit (Assertion, assertBool, assertFailure, (@=?), (@?=)) import Test.QuickCheck import Control.Monad (unless, (>=>)) @@ -247,3 +265,34 @@ instance MonadBaseControl b m => MonadBaseControl b (ResourceT m) where f $ runInBase . (\(ResourceT r) -> r reader') restoreM = ResourceT . const . restoreM #endif + +-- * For implicit ID spec + +newtype UUID = UUID { unUUID :: Text } + deriving stock + (Show, Eq, Ord, Read) + deriving newtype + (ToJSON, FromJSON, FromHttpApiData, ToHttpApiData, PathPiece) + +instance PersistFieldSql UUID where + sqlType _ = SqlOther "UUID" + +instance PersistField UUID where + toPersistValue (UUID txt) = + PersistLiteral_ Escaped (TE.encodeUtf8 txt) + fromPersistValue pv = + case pv of + PersistLiteral_ Escaped bs -> + Right $ UUID (TE.decodeUtf8 bs) + _ -> + Left "Nope" + +sqlSettingsUuid :: Text -> MkPersistSettings +sqlSettingsUuid defExpr = + let + uuidDef = + mkImplicitIdDef @UUID defExpr + settings = + setImplicitIdDef uuidDef sqlSettings + in + settings diff --git a/persistent-test/src/LongIdentifierTest.hs b/persistent-test/src/LongIdentifierTest.hs index b8fe3e808..85a6abf22 100644 --- a/persistent-test/src/LongIdentifierTest.hs +++ b/persistent-test/src/LongIdentifierTest.hs @@ -18,7 +18,7 @@ import Init -- This test creates very long identifier names. The generated foreign key is over the length limit for Postgres and MySQL -- persistent-postgresql handles this by truncating foreign key names using the same algorithm that Postgres itself does (see 'refName' in Postgresql.hs) -- MySQL currently doesn't run this test, and needs truncation logic for it to pass. 
-share [mkPersist sqlSettings, mkMigrate "migration", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings, mkMigrate "migration"] [persistLowerCase| TableAnExtremelyFantasticallySuperLongNameParent field1 Int TableAnExtremelyFantasticallySuperLongNameChild diff --git a/persistent-test/src/MigrationOnlyTest.hs b/persistent-test/src/MigrationOnlyTest.hs index 2240b9045..850f2aec8 100644 --- a/persistent-test/src/MigrationOnlyTest.hs +++ b/persistent-test/src/MigrationOnlyTest.hs @@ -1,11 +1,14 @@ -{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE TypeApplications, UndecidableInstances #-} + {-# OPTIONS_GHC -Wno-unused-top-binds #-} + module MigrationOnlyTest (specsWith, migrateAll1, migrateAll2) where import qualified Data.Text as T import Database.Persist.TH import Init +import Database.Persist.EntityDef share [mkPersist sqlSettings { mpsGeneric = True }, mkMigrate "migrateAll1"] [persistLowerCase| TwoField1 sql=two_field @@ -15,7 +18,7 @@ TwoField1 sql=two_field deriving Eq Show |] -share [mkPersist sqlSettings { mpsGeneric = True }, mkMigrate "migrateAll2", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings { mpsGeneric = True }, mkMigrate "migrateAll2"] [persistLowerCase| TwoField field1 Int field2 T.Text @@ -33,6 +36,27 @@ specsWith -> Maybe (ReaderT backend m a) -> Spec specsWith runDb mmigrate = describe "MigrationOnly field" $ do + let + edef = + entityDef $ Proxy @TwoField + describe "getEntityFields" $ do + let + fields = + getEntityFields edef + it "should have two fields" $ do + length fields `shouldBe` 2 + it "should not have any migration only fields" $ do + fields `shouldSatisfy` all isHaskellField + + describe "getEntityFieldsDatabase" $ do + let + fields = + getEntityFieldsDatabase edef + it "should have three fields" $ do + length fields `shouldBe` 3 + it "should have at one migration only field" $ do + length (filter (not . 
isHaskellField) fields) `shouldBe` 1 + it "doesn't have the field in the Haskell entity" $ asIO $ runDb $ do sequence_ mmigrate sequence_ mmigrate diff --git a/persistent-test/src/MigrationTest.hs b/persistent-test/src/MigrationTest.hs index 40ec86001..7ee8255e0 100644 --- a/persistent-test/src/MigrationTest.hs +++ b/persistent-test/src/MigrationTest.hs @@ -7,7 +7,7 @@ import qualified Data.Text as T import Init -share [mkPersist sqlSettings, mkMigrate "migrationMigrate", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings, mkMigrate "migrationMigrate"] [persistLowerCase| Target field1 Int field2 T.Text @@ -23,7 +23,7 @@ CustomSqlId Primary pk |] -share [mkPersist sqlSettings, mkMigrate "migrationAddCol", mkDeleteCascade sqlSettings] [persistLowerCase| +share [mkPersist sqlSettings, mkMigrate "migrationAddCol"] [persistLowerCase| Target1 sql=target field1 Int field2 T.Text diff --git a/persistent-test/src/PersistentTest.hs b/persistent-test/src/PersistentTest.hs index 09833ea8c..f1fb19e76 100644 --- a/persistent-test/src/PersistentTest.hs +++ b/persistent-test/src/PersistentTest.hs @@ -632,80 +632,11 @@ specsWith runDb = describe "persistent" $ do describe "documentation syntax" $ do let edef = entityDef (Proxy :: Proxy Relationship) it "provides comments on entity def" $ do - entityComments edef + getEntityComments edef `shouldBe` Just "This is a doc comment for a relationship.\nYou need to put the pipe character for each line of documentation.\nBut you can resume the doc comments afterwards.\n" it "provides comments on the field" $ do - let [nameField, _] = entityFields edef + let [nameField, _] = getEntityFields edef fieldComments nameField `shouldBe` Just "Fields should be documentable.\n" - - describe "JsonEncoding" $ do - let - subject = - JsonEncoding "Bob" 32 - subjectEntity = - Entity (JsonEncodingKey (jsonEncodingName subject)) subject - - it "encodes without an ID field" $ do - toJSON subjectEntity - `shouldBe` - Object 
(M.fromList - [ ("name", String "Bob") - , ("age", toJSON (32 :: Int)) - , ("id", String "Bob") - ]) - - it "decodes without an ID field" $ do - let - json_ = encode . Object . M.fromList $ - [ ("name", String "Bob") - , ("age", toJSON (32 :: Int)) - ] - decode json_ - `shouldBe` - Just subjectEntity - - prop "works with a Primary" $ \jsonEncoding -> do - let - ent = - Entity (JsonEncodingKey (jsonEncodingName jsonEncoding)) jsonEncoding - decode (encode ent) - `shouldBe` - Just ent - - prop "excuse me what" $ \j@JsonEncoding{..} -> do - let - ent = - Entity (JsonEncodingKey jsonEncodingName) j - toJSON ent - `shouldBe` - Object (M.fromList - [ ("name", toJSON jsonEncodingName) - , ("age", toJSON jsonEncodingAge) - , ("id", toJSON jsonEncodingName) - ]) - - prop "round trip works with composite key" $ \j@JsonEncoding2{..} -> do - let - key = JsonEncoding2Key jsonEncoding2Name jsonEncoding2Blood - ent = - Entity key j - decode (encode ent) - `shouldBe` - Just ent - - prop "works with a composite key" $ \j@JsonEncoding2{..} -> do - let - key = JsonEncoding2Key jsonEncoding2Name jsonEncoding2Blood - ent = - Entity key j - toJSON ent - `shouldBe` - Object (M.fromList - [ ("name", toJSON jsonEncoding2Name) - , ("age", toJSON jsonEncoding2Age) - , ("blood", toJSON jsonEncoding2Blood) - , ("id", toJSON key) - ]) diff --git a/persistent-test/src/PersistentTestModels.hs b/persistent-test/src/PersistentTestModels.hs index ee9c340fa..08ceec60d 100644 --- a/persistent-test/src/PersistentTestModels.hs +++ b/persistent-test/src/PersistentTestModels.hs @@ -6,6 +6,7 @@ module PersistentTestModels where import Data.Aeson +import qualified Data.List.NonEmpty as NEL import Data.Proxy import Test.QuickCheck import Database.Persist.Sql @@ -18,7 +19,7 @@ import Data.Text (append) -- just need to ensure this compiles import PersistentTestModelsImports() -share [mkPersist persistSettings { mpsGeneric = True }, mkMigrate "testMigrate", mkDeleteCascade persistSettings, mkSave 
"_ignoredSave"] [persistUpperCase| +share [mkPersist persistSettings { mpsGeneric = True }, mkMigrate "testMigrate"] [persistUpperCase| -- Dedented comment -- Header-level comment @@ -140,27 +141,6 @@ NoPrefix2 |] -share [mkMigrate "testNonGenericMigrate", mkPersist sqlSettings] [persistLowerCase| -JsonEncoding json - name Text - age Int - Primary name - deriving Show Eq - -JsonEncoding2 json - name Text - age Int - blood Text - Primary name blood - deriving Show Eq -|] - -instance Arbitrary JsonEncoding where - arbitrary = JsonEncoding <$> arbitrary <*> arbitrary - -instance Arbitrary JsonEncoding2 where - arbitrary = JsonEncoding2 <$> arbitrary <*> arbitrary <*> arbitrary - deriving instance Show (BackendKey backend) => Show (NoPrefix1Generic backend) deriving instance Eq (BackendKey backend) => Eq (NoPrefix1Generic backend) @@ -225,7 +205,7 @@ instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where where unRfoProxy :: proxy (ReverseFieldOrder a) -> Proxy a unRfoProxy _ = Proxy - revFields ed = ed { entityFields = reverse (entityFields ed) } + revFields = overEntityFields reverse toPersistFields = reverse . toPersistFields . unRFO newtype EntityField (ReverseFieldOrder a) b = EFRFO {unEFRFO :: EntityField a b} @@ -233,9 +213,9 @@ instance (PersistEntity a) => PersistEntity (ReverseFieldOrder a) where fromPersistValues = fmap RFO . fromPersistValues . reverse newtype Unique (ReverseFieldOrder a) = URFO {unURFO :: Unique a } - persistUniqueToFieldNames = reverse . persistUniqueToFieldNames . unURFO + persistUniqueToFieldNames = NEL.reverse . persistUniqueToFieldNames . unURFO persistUniqueToValues = reverse . persistUniqueToValues . unURFO - persistUniqueKeys = map URFO . reverse . persistUniqueKeys . unRFO + persistUniqueKeys = fmap URFO . reverse . persistUniqueKeys . 
unRFO persistIdField = error "ReverseFieldOrder.persistIdField" fieldLens = error "ReverseFieldOrder.fieldLens" diff --git a/persistent-test/src/RawSqlTest.hs b/persistent-test/src/RawSqlTest.hs index 4c26cd65f..33f6e9d97 100644 --- a/persistent-test/src/RawSqlTest.hs +++ b/persistent-test/src/RawSqlTest.hs @@ -8,6 +8,7 @@ import qualified Data.Conduit.List as CL import qualified Data.Text as T import Init +import Database.Persist.SqlBackend import PersistTestPetType import PersistentTestModels @@ -141,7 +142,7 @@ specsWith runDb = describe "rawSql" $ do liftIO $ ret @?= [(Single (1::Int), Single (2::Int), Single (3::Int), Single (4::Int), Single (5::Int), Single (6::Int), Single (7::Int), Single (8::Int), Single (9::Int), Single (10::Int), Single (11::Int), Single (12::Int), Single (13::Int), Single (14::Int), Single (15::Int))] getEscape :: MonadReader SqlBackend m => m (Text -> Text) -getEscape = asks connEscapeRawName +getEscape = getEscapeRawNameFunction caseCommitRollback :: Runner SqlBackend m => RunDb SqlBackend m -> Assertion caseCommitRollback runDb = runDb $ do diff --git a/persistent-test/src/Recursive.hs b/persistent-test/src/Recursive.hs index 3173b4c37..1991692b4 100644 --- a/persistent-test/src/Recursive.hs +++ b/persistent-test/src/Recursive.hs @@ -1,16 +1,21 @@ {-# LANGUAGE UndecidableInstances #-} + {-# OPTIONS_GHC -Wno-unused-top-binds #-} + module Recursive (specsWith, recursiveMigrate, cleanup) where import Init share [mkPersist sqlSettings { mpsGeneric = True }, mkMigrate "recursiveMigrate"] [persistLowerCase| + SubType object [MenuObject] deriving Show Eq + MenuObject sub SubType Maybe deriving Show Eq + |] cleanup diff --git a/persistent-test/src/RenameTest.hs b/persistent-test/src/RenameTest.hs index 5491b8aa3..051497b8e 100644 --- a/persistent-test/src/RenameTest.hs +++ b/persistent-test/src/RenameTest.hs @@ -75,9 +75,9 @@ specsWith specsWith runDb = describe "rename specs" $ do describe "LowerCaseTable" $ do it "LowerCaseTable has the 
right sql name" $ do - fieldDB (entityId (entityDef (Proxy @LowerCaseTable))) + fmap fieldDB (getEntityIdField (entityDef (Proxy @LowerCaseTable))) `shouldBe` - FieldNameDB "my_id" + Just (FieldNameDB "my_id") it "user specified id, insertKey, no default=" $ runDb $ do let rec2 = IdTable "Foo2" Nothing @@ -92,7 +92,7 @@ specsWith runDb = describe "rename specs" $ do key' @== key it "extra blocks" $ - entityExtra (entityDef (Nothing :: Maybe LowerCaseTable)) @?= + getEntityExtra (entityDef (Nothing :: Maybe LowerCaseTable)) @?= Map.fromList [ ("ExtraBlock", map T.words ["foo bar", "baz", "bin"]) , ("ExtraBlock2", map T.words ["something"]) diff --git a/persistent-test/src/TreeTest.hs b/persistent-test/src/TreeTest.hs index 226468ccd..ce14f5c7c 100644 --- a/persistent-test/src/TreeTest.hs +++ b/persistent-test/src/TreeTest.hs @@ -5,15 +5,13 @@ module TreeTest where import Init -import Database.Persist.TH (mkDeleteCascade) - -- mpsGeneric = False is due to a bug or at least lack of a feature in -- mkKeyTypeDec TH.hs share [ mkPersist persistSettings { mpsGeneric = False } , mkMigrate "treeMigrate" - , mkDeleteCascade persistSettings { mpsGeneric = False } ] [persistLowerCase| + ] [persistLowerCase| Tree sql=trees name Text parent Text Maybe @@ -41,14 +39,14 @@ specsWith runDb = describe "tree" $ do gp <- getJust kgp treeFkparent gp @== Nothing describe "entityDef" $ do - let EntityDef{..} = entityDef (Proxy :: Proxy Tree) + let ed = entityDef (Proxy :: Proxy Tree) it "has the right haskell name" $ do - entityHaskell `shouldBe` EntityNameHS "Tree" + getEntityHaskellName ed `shouldBe` EntityNameHS "Tree" it "has the right DB name" $ do - entityDB `shouldBe` EntityNameDB "trees" + getEntityDBName ed `shouldBe` EntityNameDB "trees" describe "foreign ref" $ do - let [ForeignDef{..}] = entityForeigns (entityDef (Proxy :: Proxy Tree)) + let [ForeignDef{..}] = getEntityForeignDefs (entityDef (Proxy :: Proxy Tree)) it "has the right haskell name" $ do foreignRefTableHaskell 
`shouldBe` EntityNameHS "Tree" @@ -60,7 +58,7 @@ specsWith runDb = describe "tree" $ do ConstraintNameHS "fkparent" it "has the right DB constraint name" $ do foreignConstraintNameDBName `shouldBe` - ConstraintNameDB "treesfkparent" + ConstraintNameDB "treefkparent" it "has the right fields" $ do foreignFields `shouldBe` [ ( (FieldNameHS "parent", FieldNameDB "parent") diff --git a/persistent/ChangeLog.md b/persistent/ChangeLog.md index fa2b8b0cc..926ba7fbd 100644 --- a/persistent/ChangeLog.md +++ b/persistent/ChangeLog.md @@ -1,6 +1,76 @@ # Changelog for persistent -## Unreleased +## 2.13.0.0 (unreleased) + +* [#1244](https://github.com/yesodweb/persistent/pull/1244) + * Implement config for customising the FK name +* [#1252](https://github.com/yesodweb/persistent/pull/1252) + * `mkMigrate` now defers to `mkEntityDefList` and `migrateModels` instead of + fixing the foreign key references itself. + * `mkSave` was deprecated - the function did not fix foreign key references. + Please use `mkEntityDefList` instead. + * `EntityDef` will now include fields marked `MigrationOnly` and + `SafeToRemove`. Beforehand, those were filtered out, and `mkMigrate` + applied. The function `getEntityFields` wll only return fields defined on + the Haskell type - for all columns, see `getEntityFieldsDatabase`. +* [#1225](https://github.com/yesodweb/persistent/pull/1225) + * The fields and constructor for `SqlBackend` are no longer exported by + default. They are available from an internal module, + `Database.Persist.Sql.Types.Internal`. Breaking changes from `Internal` + modules are not reflected in the major version. This will allow us to + release new functionality without breaking your code. It's recommended to + switch to using the smart constructor functions and setter functions that + are now exported from `Database.Persist.Sql` instead. + * A new API is available for constructing and using a `SqlBackend`, provided + in `Database.Persist.SqlBackend`. 
Instead of using the `SqlBackend` + directly, use `mkSqlBackend` and the datatype `MkSqlBackendArgs`. The + `MkSqlBackendArgs` record has the same field names as the `SqlBackend`, so + the translation is easy: + ```diff +- SqlBackend ++ mkSqlBackend MkSqlBackendArgs + { connInsertSql = ... + , connCommit = ... + , connEscapeFieldName = ... + , connEscapeTableName = ... + , etc + } + ``` + Some fields were omitted in `MkSqlBackendArgs`. These fields are + *optional* - they provide enhanced or backend-specific functionality. For + these, use the setter functions like `setConnUpsertSql`. + * Previously hidden modules are now exposed under the `Internal` namespace. + * The `connLimitOffset` function used to have a `Bool` parameter. This + parameter is unused and has been removed. +* [#1234](https://github.com/yesodweb/persistent/pull/1234) + * You can now customize the default implied ID column. See the documentation + in `Database.Persist.ImplicitIdDef` for more details. + * Moved the various `Name` types into `Database.Persist.Names` + * Removed the `hasCompositeKey` function. See `hasCompositePrimaryKey` and + `hasNaturalKey` as replacements. + * The `EntityDef` constructor and field labels are not exported by default. + Get those from `Database.Persist.EntityDef.Internal`, but you should + migrate to the getters/setters in `Database.Persist.EntityDef` as you can. + * Added the `Database.Persist.FieldDef` and + `Database.Persist.FieldDef.Internal` modules. + * The `PersistSettings` type was made abstract. Please migrate to the + getters/setters defined in that `Database.Persist.Quasi`, or use + `Database.Persist.Quasi.Internal` if you don't mind the possibility of + breaking changes. + * Add the `runSqlCommand` function for running arbitrary SQL during + migrations. + * Add `migrateModels` function for a TH-free migration facility. 
+* [#1253](https://github.com/yesodweb/persistent/pull/1253) + * Add `discoverEntities` to discover instances of the class and return their + entity definitions. +* [#1250](https://github.com/yesodweb/persistent/pull/1250) + * The `mpsGeneric` function has been deprecated. If you need this + functionality, please comment with your needs on the GitHub issue tracker. + We may un-deprecate it, or we may provide a new and better means of + facilitating a solution to your problem. +* [#1255](https://github.com/yesodweb/persistent/pull/1255) + * `mkPersist` now checks to see if an instance already exists for + `PersistEntity` for the inputs. ## 2.12.1.2 @@ -12,6 +82,19 @@ * Refactor setEmbedField to use do notation * [#1237](https://github.com/yesodweb/persistent/pull/1237) * Remove nonEmptyOrFail function from recent tests +* [#1256](https://github.com/yesodweb/persistent/pull/1256) + * The QuasiQuoter has been refactored and improved. + * You can now use `mkPersistWith` to pass in a list of pre-existing + `EntityDef` to improve foreign key detection and splitting up models + across multiple modules. + * The `entityId` field now returns an `EntityIdDef`, which specifies what + the ID field actually is. This is a move to better support natural keys. + * Several types that had lists have been refactored to use nonempty lists to + better capture the semantics. + * `mkDeleteCascade` is deprecated. Please use the Cascade behavior directly + on fields. + * You can use `Key Foo` and `FooId` interchangeably in fields. + * Support for GHC < 8.4 dropped. 
## 2.12.1.1 diff --git a/persistent/Database/Persist.hs b/persistent/Database/Persist.hs index e9846d4cc..7d1495961 100644 --- a/persistent/Database/Persist.hs +++ b/persistent/Database/Persist.hs @@ -1,4 +1,5 @@ {-# LANGUAGE ExistentialQuantification #-} + module Database.Persist ( module Database.Persist.Class , module Database.Persist.Types diff --git a/persistent/Database/Persist/Class.hs b/persistent/Database/Persist/Class.hs index e4fde5fc4..c7109b9c5 100644 --- a/persistent/Database/Persist/Class.hs +++ b/persistent/Database/Persist/Class.hs @@ -92,6 +92,7 @@ module Database.Persist.Class , PersistUniqueWrite (..) , OnlyOneUniqueKey (..) , AtLeastOneUniqueKey (..) + , onlyOneUniqueDef , NoUniqueKeysError , MultipleUniqueKeysError , getByValue diff --git a/persistent/Database/Persist/Class/DeleteCascade.hs b/persistent/Database/Persist/Class/DeleteCascade.hs index 88cc472ec..4ab445994 100644 --- a/persistent/Database/Persist/Class/DeleteCascade.hs +++ b/persistent/Database/Persist/Class/DeleteCascade.hs @@ -1,5 +1,7 @@ {-# LANGUAGE ExplicitForAll #-} -module Database.Persist.Class.DeleteCascade + + +module Database.Persist.Class.DeleteCascade {-# DEPRECATED "The DeleteCascade module is deprecated. You can now set cascade behavior directly on entities in the quasiquoter." #-} ( DeleteCascade (..) , deleteCascadeWhere ) where @@ -14,6 +16,8 @@ import Database.Persist.Class.PersistStore import Database.Persist.Class.PersistQuery import Database.Persist.Class.PersistEntity +{-# DEPRECATED DeleteCascade "The DeleteCascade class is deprecated since you can now define cascade behavior directly on an entity." #-} + -- | For combinations of backends and entities that support -- cascade-deletion. “Cascade-deletion” means that entries that depend on -- other entries to be deleted will be deleted as well. @@ -24,6 +28,8 @@ class (PersistStoreWrite backend, PersistEntity record, BaseBackend backend ~ Pe -- entry. 
deleteCascade :: MonadIO m => Key record -> ReaderT backend m () +{-# DEPRECATED deleteCascadeWhere "This function is deprecated since you can set cascading delete behavior directly on the entity." #-} + -- | Cascade-deletion of entries satisfying given filters. deleteCascadeWhere :: forall record backend m. (MonadIO m, DeleteCascade record backend, PersistQueryWrite backend) => [Filter record] -> ReaderT backend m () diff --git a/persistent/Database/Persist/Class/PersistEntity.hs b/persistent/Database/Persist/Class/PersistEntity.hs index edde12c87..61629ff00 100644 --- a/persistent/Database/Persist/Class/PersistEntity.hs +++ b/persistent/Database/Persist/Class/PersistEntity.hs @@ -1,15 +1,15 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE TypeApplications #-} -{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FunctionalDependencies #-} {-# LANGUAGE GADTs #-} -{-# LANGUAGE RankNTypes #-} {-# LANGUAGE KindSignatures #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TypeApplications #-} {-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE DataKinds #-} -{-# LANGUAGE FlexibleInstances #-} -{-# LANGUAGE AllowAmbiguousTypes #-} module Database.Persist.Class.PersistEntity ( PersistEntity (..) @@ -32,12 +32,22 @@ module Database.Persist.Class.PersistEntity , SymbolToField (..) ) where -import Data.Aeson (ToJSON (..), withObject, FromJSON (..), fromJSON, object, (.:), (.=), Value (Object)) +import Data.Aeson + ( FromJSON(..) + , ToJSON(..) 
+ , Value(Object) + , fromJSON + , object + , withObject + , (.:) + , (.=) + ) import qualified Data.Aeson.Parser as AP -import Data.Aeson.Types (Parser,Result(Error,Success)) import Data.Aeson.Text (encodeToTextBuilder) +import Data.Aeson.Types (Parser, Result(Error, Success)) import Data.Attoparsec.ByteString (parseOnly) import qualified Data.HashMap.Strict as HM +import Data.List.NonEmpty (NonEmpty(..)) import Data.Maybe (isJust) import Data.Monoid (mappend) import Data.Text (Text) @@ -50,6 +60,7 @@ import GHC.OverloadedLabels import GHC.TypeLits import Database.Persist.Class.PersistField +import Database.Persist.Names import Database.Persist.Types.Base -- | Persistent serialized Haskell records to the database. @@ -104,7 +115,7 @@ class ( PersistField (Key record), ToJSON (Key record), FromJSON (Key record) -- | A meta operation to retrieve all the 'Unique' keys. persistUniqueKeys :: record -> [Unique record] -- | A lower level operation. - persistUniqueToFieldNames :: Unique record -> [(FieldNameHS, FieldNameDB)] + persistUniqueToFieldNames :: Unique record -> NonEmpty (FieldNameHS, FieldNameDB) -- | A lower level operation. persistUniqueToValues :: Unique record -> [PersistValue] diff --git a/persistent/Database/Persist/Class/PersistUnique.hs b/persistent/Database/Persist/Class/PersistUnique.hs index 80e280a70..5bbd3b704 100644 --- a/persistent/Database/Persist/Class/PersistUnique.hs +++ b/persistent/Database/Persist/Class/PersistUnique.hs @@ -55,8 +55,7 @@ import Database.Persist.Types -- you must manually place a unique index on a field to have a uniqueness -- constraint. -- -class (PersistCore backend, PersistStoreRead backend) => - PersistUniqueRead backend where +class PersistStoreRead backend => PersistUniqueRead backend where -- | Get a record by unique key, if available. Returns also the identifier. 
-- -- === __Example usage__ @@ -297,13 +296,13 @@ class PersistEntity record => OnlyOneUniqueKey record where -- | Given a proxy for a 'PersistEntity' record, this returns the sole -- 'UniqueDef' for that entity. -- --- @since TODO release me +-- @since 2.13.0.0 onlyOneUniqueDef :: (OnlyOneUniqueKey record, Monad proxy) => proxy record -> UniqueDef onlyOneUniqueDef prxy = - case entityUniques (entityDef prxy) of + case getEntityUniques (entityDef prxy) of [uniq] -> uniq _ -> error "impossible due to OnlyOneUniqueKey constraint" @@ -352,7 +351,7 @@ atLeastOneUniqueDef => proxy record -> NonEmpty UniqueDef atLeastOneUniqueDef prxy = - case entityUniques (entityDef prxy) of + case getEntityUniques (entityDef prxy) of (x:xs) -> x :| xs _ -> error "impossible due to AtLeastOneUniqueKey record constraint" diff --git a/persistent/Database/Persist/EntityDef.hs b/persistent/Database/Persist/EntityDef.hs new file mode 100644 index 000000000..aba4a12fa --- /dev/null +++ b/persistent/Database/Persist/EntityDef.hs @@ -0,0 +1,193 @@ +-- | An 'EntityDef' represents metadata about a type that @persistent@ uses to +-- store the type in the database, as well as generate Haskell code from it. +-- +-- @since 2.13.0.0 +module Database.Persist.EntityDef + ( -- * The 'EntityDef' type + EntityDef + -- * Construction + -- * Accessors + , getEntityHaskellName + , getEntityDBName + , getEntityFields + , getEntityFieldsDatabase + , getEntityForeignDefs + , getEntityUniques + , getEntityId + , getEntityIdField + , getEntityKeyFields + , getEntityComments + , getEntityExtra + , isEntitySum + , entityPrimary + , entitiesPrimary + , keyAndEntityFields + -- * Setters + , setEntityId + , setEntityIdDef + , setEntityDBName + , overEntityFields + -- * Related Types + , EntityIdDef(..) 
+ ) where + +import Data.Text (Text) +import Data.Map (Map) +import Data.List.NonEmpty (NonEmpty) + +import Database.Persist.EntityDef.Internal +import Database.Persist.FieldDef + +import Database.Persist.Types.Base + ( UniqueDef + , ForeignDef + , entityKeyFields + ) +import Database.Persist.Names + +-- | Retrieve the list of 'UniqueDef' from an 'EntityDef'. This currently does +-- not include a @Primary@ key, if one is defined. A future version of +-- @persistent@ will include a @Primary@ key among the 'Unique' constructors for +-- the 'Entity'. +-- +-- @since 2.13.0.0 +getEntityUniques + :: EntityDef + -> [UniqueDef] +getEntityUniques = entityUniques + +-- | Retrieve the Haskell name of the given entity. +-- +-- @since 2.13.0.0 +getEntityHaskellName + :: EntityDef + -> EntityNameHS +getEntityHaskellName = entityHaskell + +-- | Return the database name for the given entity. +-- +-- @since 2.13.0.0 +getEntityDBName + :: EntityDef + -> EntityNameDB +getEntityDBName = entityDB + +getEntityExtra :: EntityDef -> Map Text [[Text]] +getEntityExtra = entityExtra + +-- | +-- +-- @since 2.13.0.0 +setEntityDBName :: EntityNameDB -> EntityDef -> EntityDef +setEntityDBName db ed = ed { entityDB = db } + +getEntityComments :: EntityDef -> Maybe Text +getEntityComments = entityComments + +-- | +-- +-- @since 2.13.0.0 +getEntityForeignDefs + :: EntityDef + -> [ForeignDef] +getEntityForeignDefs = entityForeigns + +-- | Retrieve the list of 'FieldDef' that makes up the fields of the entity. +-- +-- This does not return the fields for an @Id@ column or an implicit @id@. It +-- will return the key columns if you used the @Primary@ syntax for defining the +-- primary key. +-- +-- This does not return fields that are marked 'SafeToRemove' or 'MigrationOnly' +-- - so it only returns fields that are represented in the Haskell type. If you +-- need those fields, use 'getEntityFieldsDatabase'. 
+-- +-- @since 2.13.0.0 +getEntityFields + :: EntityDef + -> [FieldDef] +getEntityFields = filter isHaskellField . entityFields + +-- | This returns all of the 'FieldDef' defined for the 'EntityDef', including +-- those fields that are marked as 'MigrationOnly' (and therefore only present +-- in the database) or 'SafeToRemove' (and a migration will drop the column if +-- it exists in the database). +-- +-- For all the fields that are present on the Haskell-type, see +-- 'getEntityFields'. +-- +-- @since 2.13.0.0 +getEntityFieldsDatabase + :: EntityDef + -> [FieldDef] +getEntityFieldsDatabase = entityFields + +-- | +-- +-- @since 2.13.0.0 +isEntitySum + :: EntityDef + -> Bool +isEntitySum = entitySum + +-- | +-- +-- @since 2.13.0.0 +getEntityId + :: EntityDef + -> EntityIdDef +getEntityId = entityId + +-- | +-- +-- @since 2.13.0.0 +getEntityIdField :: EntityDef -> Maybe FieldDef +getEntityIdField ed = + case getEntityId ed of + EntityIdField fd -> + pure fd + _ -> + Nothing + +-- | Set an 'entityId' to be the given 'FieldDef'. +-- +-- @since 2.13.0.0 +setEntityId + :: FieldDef + -> EntityDef + -> EntityDef +setEntityId fd = setEntityIdDef (EntityIdField fd) + +-- | +-- +-- @since 2.13.0.0 +setEntityIdDef + :: EntityIdDef + -> EntityDef + -> EntityDef +setEntityIdDef i ed = ed { entityId = i } + +-- | +-- +-- @since 2.13.0.0 +getEntityKeyFields + :: EntityDef + -> NonEmpty FieldDef +getEntityKeyFields = entityKeyFields + +-- | TODO +-- +-- @since 2.13.0.0 +setEntityFields :: [FieldDef] -> EntityDef -> EntityDef +setEntityFields fd ed = ed { entityFields = fd } + +-- | Perform a mapping function over all of the entity fields, as determined by +-- 'getEntityFieldsDatabase'. 
+-- +-- @since 2.13.0.0 +overEntityFields + :: ([FieldDef] -> [FieldDef]) + -> EntityDef + -> EntityDef +overEntityFields f ed = + setEntityFields (f (getEntityFieldsDatabase ed)) ed diff --git a/persistent/Database/Persist/EntityDef/Internal.hs b/persistent/Database/Persist/EntityDef/Internal.hs new file mode 100644 index 000000000..16adf92e0 --- /dev/null +++ b/persistent/Database/Persist/EntityDef/Internal.hs @@ -0,0 +1,18 @@ +-- | The 'EntityDef' type, fields, and constructor are exported from this +-- module. Breaking changes to the 'EntityDef' type are not reflected in +-- the major version of the API. Please import from +-- "Database.Persist.EntityDef" instead. +-- +-- If you need this module, please file a GitHub issue why. +-- +-- @since 2.13.0.0 +module Database.Persist.EntityDef.Internal + ( EntityDef(..) + , entityPrimary + , entitiesPrimary + , keyAndEntityFields + , toEmbedEntityDef + , EntityIdDef(..) + ) where + +import Database.Persist.Types.Base diff --git a/persistent/Database/Persist/FieldDef.hs b/persistent/Database/Persist/FieldDef.hs new file mode 100644 index 000000000..fed4c3f81 --- /dev/null +++ b/persistent/Database/Persist/FieldDef.hs @@ -0,0 +1,45 @@ +-- | +-- +-- @since 2.13.0.0 +module Database.Persist.FieldDef + ( -- * The 'FieldDef' type + FieldDef + -- ** Setters + , setFieldAttrs + , overFieldAttrs + , addFieldAttr + -- ** Helpers + , isFieldNotGenerated + , isHaskellField + -- * 'FieldCascade' + , FieldCascade(..) + , renderFieldCascade + , renderCascadeAction + , noCascade + , CascadeAction(..) + ) where + +import Database.Persist.FieldDef.Internal + +import Database.Persist.Types.Base + ( isHaskellField + , FieldAttr + ) + +-- | Replace the 'FieldDef' 'FieldAttr' with the new list. +-- +-- @since 2.13.0.0 +setFieldAttrs :: [FieldAttr] -> FieldDef -> FieldDef +setFieldAttrs fas fd = fd { fieldAttrs = fas } + +-- | Modify the list of field attributes. 
+--
+-- @since 2.13.0.0
+overFieldAttrs :: ([FieldAttr] -> [FieldAttr]) -> FieldDef -> FieldDef
+overFieldAttrs k fd = fd { fieldAttrs = k (fieldAttrs fd) }
+
+-- | Add an attribute to the list of field attributes.
+--
+-- @since 2.13.0.0
+addFieldAttr :: FieldAttr -> FieldDef -> FieldDef
+addFieldAttr fa = overFieldAttrs (fa :)
diff --git a/persistent/Database/Persist/FieldDef/Internal.hs b/persistent/Database/Persist/FieldDef/Internal.hs
new file mode 100644
index 000000000..433806d37
--- /dev/null
+++ b/persistent/Database/Persist/FieldDef/Internal.hs
@@ -0,0 +1,14 @@
+-- | TODO: standard Internal module boilerplate
+--
+-- @since 2.13.0.0
+module Database.Persist.FieldDef.Internal
+    ( FieldDef(..)
+    , isFieldNotGenerated
+    , FieldCascade(..)
+    , renderFieldCascade
+    , renderCascadeAction
+    , noCascade
+    , CascadeAction(..)
+    ) where
+
+import Database.Persist.Types.Base
diff --git a/persistent/Database/Persist/ImplicitIdDef.hs b/persistent/Database/Persist/ImplicitIdDef.hs
new file mode 100644
index 000000000..e82f5c871
--- /dev/null
+++ b/persistent/Database/Persist/ImplicitIdDef.hs
@@ -0,0 +1,57 @@
+{-# LANGUAGE TemplateHaskellQuotes #-}
+
+-- | This module contains types and functions for creating an 'ImplicitIdDef',
+-- which allows you to customize the implied ID column that @persistent@
+-- generates.
+--
+-- If this module doesn't suit your needs, you may want to import
+-- "Database.Persist.ImplicitIdDef.Internal" instead. If you do so, please file
+-- an issue on GitHub so we can support your needs. Breaking changes to that
+-- module will *not* be accompanied with a major version bump.
+-- +-- @since 2.13.0.0 +module Database.Persist.ImplicitIdDef + ( -- * The Type + ImplicitIdDef + -- * Construction + , mkImplicitIdDef + -- * Autoincrementing Integer Key + , autoIncrementingInteger + -- * Getters + -- * Setters + , setImplicitIdDefMaxLen + , unsafeClearDefaultImplicitId + ) where + +import Language.Haskell.TH + +import Database.Persist.ImplicitIdDef.Internal +import Database.Persist.Types.Base + ( FieldType(..) + , SqlType(..) + ) +import Database.Persist.Class (BackendKey) +import Database.Persist.Names + +-- | This is the default variant. Setting the implicit ID definition to this +-- value should not have any change at all on how entities are defined by +-- default. +-- +-- @since 2.13.0.0 +autoIncrementingInteger :: ImplicitIdDef +autoIncrementingInteger = + ImplicitIdDef + { iidFieldType = \entName -> + FTTypeCon Nothing $ unEntityNameHS entName `mappend` "Id" + , iidFieldSqlType = + SqlInt64 + , iidType = \isMpsGeneric mpsBackendType -> + ConT ''BackendKey `AppT` + if isMpsGeneric + then VarT (mkName "backend") + else mpsBackendType + , iidDefault = + Nothing + , iidMaxLen = + Nothing + } diff --git a/persistent/Database/Persist/ImplicitIdDef/Internal.hs b/persistent/Database/Persist/ImplicitIdDef/Internal.hs new file mode 100644 index 000000000..1aa002e40 --- /dev/null +++ b/persistent/Database/Persist/ImplicitIdDef/Internal.hs @@ -0,0 +1,226 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StrictData #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeInType #-} + +-- | WARNING: This is an @Internal@ module. As such, breaking changes to the API +-- of this module will not have a corresponding major version bump. +-- +-- Please depend on "Database.Persist.ImplicitIdDef" instead. If you can't use +-- that module, please file an issue on GitHub with your desired use case. 
+-- +-- @since 2.13.0.0 +module Database.Persist.ImplicitIdDef.Internal where + +import Data.Proxy +import Data.Text (Text) +import qualified Data.Text as Text +import Language.Haskell.TH (Type) +import LiftType +import Type.Reflection +import Data.Typeable (eqT) +import Data.Foldable (asum) + +import Database.Persist.Class.PersistField (PersistField) +import Database.Persist.Names +import Database.Persist.Sql.Class +import Database.Persist.Types + +-- | A specification for how the implied ID columns are created. +-- +-- By default, @persistent@ will give each table a default column named @id@ +-- (customizable by 'PersistSettings'), and the column type will be whatever +-- you'd expect from @'BackendKey' yourBackendType@. For The 'SqlBackend' type, +-- this is an auto incrementing integer primary key. +-- +-- You might want to give a different example. A common use case in postgresql +-- is to use the UUID type, and automatically generate them using a SQL +-- function. +-- +-- Previously, you'd need to add a custom @Id@ annotation for each model. +-- +-- > User +-- > Id UUID default="uuid_generate_v1mc()" +-- > name Text +-- > +-- > Dog +-- > Id UUID default="uuid_generate_v1mc()" +-- > name Text +-- > user UserId +-- +-- Now, you can simply create an 'ImplicitIdDef' that corresponds to this +-- declaration. +-- +-- @ +-- newtype UUID = UUID 'ByteString' +-- +-- instance 'PersistField' UUID where +-- 'toPersistValue' (UUID bs) = +-- 'PersistLiteral_' 'Escaped' bs +-- 'fromPersistValue' pv = +-- case pv of +-- PersistLiteral_ Escaped bs -> +-- Right (UUID bs) +-- _ -> +-- Left "nope" +-- +-- instance 'PersistFieldSql' UUID where +-- 'sqlType' _ = 'SqlOther' "UUID" +-- @ +-- +-- With this instance at the ready, we can now create our implicit definition: +-- +-- @ +-- uuidDef :: ImplicitIdDef +-- uuidDef = mkImplicitIdDef \@UUID "uuid_generate_v1mc()" +-- @ +-- +-- And we can use 'setImplicitIdDef' to use this with the 'MkPersistSettings' +-- for our block. 
+-- +-- @ +-- mkPersist (setImplicitIdDef uuidDef sqlSettings) [persistLowerCase| ... |] +-- @ +-- +-- TODO: either explain interaction with mkMigrate or fix it. see issue #1249 +-- for more details. +-- +-- @since 2.13.0.0 +data ImplicitIdDef = ImplicitIdDef + { iidFieldType :: EntityNameHS -> FieldType + -- ^ The field type. Accepts the 'EntityNameHS' if you want to refer to it. + -- By default, @Id@ is appended to the end of the Haskell name. + -- + -- @since 2.13.0.0 + , iidFieldSqlType :: SqlType + -- ^ The 'SqlType' for the default column. By default, this is 'SqlInt64' to + -- correspond with an autoincrementing integer primary key. + -- + -- @since 2.13.0.0 + , iidType :: Bool -> Type -> Type + -- ^ The Bool argument is whether or not the 'MkPersistBackend' type has the + -- 'mpsGeneric' field set. + -- + -- The 'Type' is the 'mpsBackend' value. + -- + -- The default uses @'BackendKey' 'SqlBackend'@ (or a generic equivalent). + -- + -- @since 2.13.0.0 + , iidDefault :: Maybe Text + -- ^ The default expression for the field. Note that setting this to + -- 'Nothing' is unsafe. see + -- https://github.com/yesodweb/persistent/issues/1247 for more information. + -- + -- With some cases - like the Postgresql @SERIAL@ type - this is safe, since + -- there's an implied default. + -- + -- @since 2.13.0.0 + , iidMaxLen :: Maybe Integer + -- ^ Specify the maximum length for a key column. This is necessary for + -- @VARCHAR@ columns, like @UUID@ in MySQL. MySQL will throw a runtime error + -- if a text or binary column is used in an index without a length + -- specification. + -- + -- @since 2.13.0.0 + } + +-- | Create an 'ImplicitIdDef' based on the 'Typeable' and 'PersistFieldSql' +-- constraints in scope. +-- +-- This function uses the @TypeApplications@ syntax. Let's look at an example +-- that works with Postgres UUIDs. 
+-- +-- > newtype UUID = UUID Text +-- > deriving newtype PersistField +-- > +-- > instance PersistFieldSql UUID where +-- > sqlType _ = SqlOther "UUID" +-- > +-- > idDef :: ImplicitIdDef +-- > idDef = mkImplicitIdDefTypeable @UUID "uuid_generate_v1mc()" +-- +-- This 'ImplicitIdDef' will generate default UUID columns, and the database +-- will call the @uuid_generate_v1mc()@ function to generate the value for new +-- rows being inserted. +-- +-- If the type @t@ is 'Text' or 'String' then a @max_len@ attribute of 200 is +-- set. To customize this, use 'setImplicitIdDefMaxLen'. +-- +-- @since 2.13.0.0 +mkImplicitIdDef + :: forall t. (Typeable t, PersistFieldSql t) + => Text + -- ^ The default expression to use for columns. Should be valid SQL in the + -- language you're using. + -> ImplicitIdDef +mkImplicitIdDef def = + ImplicitIdDef + { iidFieldType = \_ -> + fieldTypeFromTypeable @t + , iidFieldSqlType = + sqlType (Proxy @t) + , iidType = + \_ _ -> liftType @t + , iidDefault = + Just def + , iidMaxLen = + -- this follows a special casing behavior that @persistent@ has done + -- for a while now. this keeps folks code from breaking and probably + -- is mostly what people want. + asum + [ 200 <$ eqT @t @Text + , 200 <$ eqT @t @String + ] + } + +-- | Set the maximum length of the implied ID column. This is required for +-- any type where the associated 'SqlType' is a @TEXT@ or @VARCHAR@ sort of +-- thing. +-- +-- @since 2.13.0.0 +setImplicitIdDefMaxLen + :: Integer + -> ImplicitIdDef + -> ImplicitIdDef +setImplicitIdDefMaxLen i iid = iid { iidMaxLen = Just i } + +-- | This function converts a 'Typeable' type into a @persistent@ +-- representation of the type of a field - 'FieldTyp'. +-- +-- @since 2.13.0.0 +fieldTypeFromTypeable :: forall t. (PersistField t, Typeable t) => FieldType +fieldTypeFromTypeable = go (typeRep @t) + where + go :: forall k (a :: k). 
TypeRep a -> FieldType + go tr = + case tr of + Con tyCon -> + FTTypeCon Nothing $ Text.pack $ tyConName tyCon + App trA trB -> + FTApp (go trA) (go trB) + Fun _ _ -> + error "No functions in field defs." + +-- | Remove the default attribute of the 'ImplicitIdDef' column. This will +-- require you to provide an ID for the model with every insert, using +-- 'insertKey' instead of 'insert', unless the type has some means of getting +-- around that in the migrations. +-- +-- As an example, the Postgresql @SERIAL@ type expands to an autoincrementing +-- integer. Postgres will implicitly create the relevant series and set the +-- default to be @NEXTVAL('series_name')@. A default is therefore unnecessary to +-- use for this type. +-- +-- However, for a @UUID@, postgres *does not* have an implicit default. You must +-- either specify a default UUID generation function, or insert them yourself +-- (again, using 'insertKey'). +-- +-- This function will be deprecated in the future when omiting the default +-- implicit ID column is more fully supported. +-- +-- @since 2.13.0.0 +unsafeClearDefaultImplicitId :: ImplicitIdDef -> ImplicitIdDef +unsafeClearDefaultImplicitId iid = iid { iidDefault = Nothing } diff --git a/persistent/Database/Persist/Names.hs b/persistent/Database/Persist/Names.hs new file mode 100644 index 000000000..e075ff604 --- /dev/null +++ b/persistent/Database/Persist/Names.hs @@ -0,0 +1,72 @@ +{-# LANGUAGE DeriveLift #-} + +-- | This module contains types and functions for working with and +-- disambiguating database and Haskell names. +-- +-- @since 2.13.0.0 +module Database.Persist.Names where + +import Data.Text (Text) +import Language.Haskell.TH.Syntax (Lift) +-- Bring `Lift (Map k v)` instance into scope, as well as `Lift Text` +-- instance on pre-1.2.4 versions of `text` +import Instances.TH.Lift () + +-- | Convenience operations for working with '-NameDB' types. 
+--
+-- @since 2.12.0.0
+class DatabaseName a where
+    escapeWith :: (Text -> str) -> (a -> str)
+
+-- | A 'FieldNameDB' represents the datastore-side name that @persistent@
+-- will use for a field.
+--
+-- @since 2.12.0.0
+newtype FieldNameDB = FieldNameDB { unFieldNameDB :: Text }
+    deriving (Show, Eq, Read, Ord, Lift)
+
+-- | @since 2.12.0.0
+instance DatabaseName FieldNameDB where
+    escapeWith f (FieldNameDB n) = f n
+
+-- | A 'FieldNameHS' represents the Haskell-side name that @persistent@
+-- will use for a field.
+--
+-- @since 2.12.0.0
+newtype FieldNameHS = FieldNameHS { unFieldNameHS :: Text }
+    deriving (Show, Eq, Read, Ord, Lift)
+
+-- | An 'EntityNameHS' represents the Haskell-side name that @persistent@
+-- will use for an entity.
+--
+-- @since 2.12.0.0
+newtype EntityNameHS = EntityNameHS { unEntityNameHS :: Text }
+    deriving (Show, Eq, Read, Ord, Lift)
+
+-- | An 'EntityNameDB' represents the datastore-side name that @persistent@
+-- will use for an entity.
+--
+-- @since 2.12.0.0
+newtype EntityNameDB = EntityNameDB { unEntityNameDB :: Text }
+    deriving (Show, Eq, Read, Ord, Lift)
+
+instance DatabaseName EntityNameDB where
+    escapeWith f (EntityNameDB n) = f n
+
+-- | A 'ConstraintNameDB' represents the datastore-side name that @persistent@
+-- will use for a constraint.
+--
+-- @since 2.12.0.0
+newtype ConstraintNameDB = ConstraintNameDB { unConstraintNameDB :: Text }
+    deriving (Show, Eq, Read, Ord, Lift)
+
+-- | @since 2.12.0.0
+instance DatabaseName ConstraintNameDB where
+    escapeWith f (ConstraintNameDB n) = f n
+
+-- | A 'ConstraintNameHS' represents the Haskell-side name that @persistent@
+-- will use for a constraint.
+-- +-- @since 2.12.0.0 +newtype ConstraintNameHS = ConstraintNameHS { unConstraintNameHS :: Text } + deriving (Show, Eq, Read, Ord, Lift) diff --git a/persistent/Database/Persist/PersistValue.hs b/persistent/Database/Persist/PersistValue.hs new file mode 100644 index 000000000..0317a7189 --- /dev/null +++ b/persistent/Database/Persist/PersistValue.hs @@ -0,0 +1,253 @@ +{-# LANGUAGE PatternSynonyms #-} + +-- | This module contains an intermediate representation of values before the +-- backends serialize them into explicit database types. +-- +-- @since 2.13.0.0 +module Database.Persist.PersistValue + ( module Database.Persist.PersistValue + , PersistValue(.., PersistLiteral, PersistLiteralEscaped, PersistDbSpecific) + ) where + +import qualified Data.ByteString.Base64 as B64 +import qualified Data.Text.Encoding as TE +import qualified Data.ByteString.Char8 as BS8 +import qualified Data.Vector as V +import Data.Int (Int64) +import qualified Data.Scientific +import Data.Text.Encoding.Error (lenientDecode) +import Data.Bits (shiftL, shiftR) +import Control.Arrow (second) +import Numeric (readHex, showHex) +import qualified Data.Text as Text +import Data.Text (Text) +import Data.ByteString (ByteString, foldl') +import Data.Time (Day, TimeOfDay, UTCTime) +import Web.PathPieces (PathPiece(..)) +import qualified Data.Aeson as A +import qualified Data.ByteString as BS +import qualified Data.HashMap.Strict as HM +import Web.HttpApiData + ( FromHttpApiData(..) + , ToHttpApiData(..) + , parseUrlPieceMaybe + , readTextData + ) + +-- | A raw value which can be stored in any backend and can be marshalled to +-- and from a 'PersistField'. 
+data PersistValue
+    = PersistText Text
+    | PersistByteString ByteString
+    | PersistInt64 Int64
+    | PersistDouble Double
+    | PersistRational Rational
+    | PersistBool Bool
+    | PersistDay Day
+    | PersistTimeOfDay TimeOfDay
+    | PersistUTCTime UTCTime
+    | PersistNull
+    | PersistList [PersistValue]
+    | PersistMap [(Text, PersistValue)]
+    | PersistObjectId ByteString
+    -- ^ Intended especially for MongoDB backend
+    | PersistArray [PersistValue]
+    -- ^ Intended especially for PostgreSQL backend for text arrays
+    | PersistLiteral_ LiteralType ByteString
+    -- ^ This constructor is used to specify some raw literal value for the
+    -- backend. The 'LiteralType' value specifies how the value should be
+    -- escaped. This can be used to make special, custom types available
+    -- in the back end.
+    --
+    -- @since 2.12.0.0
+    deriving (Show, Read, Eq, Ord)
+
+-- | A type that determines how a backend should handle the literal.
+--
+-- @since 2.12.0.0
+data LiteralType
+    = Escaped
+    -- ^ The accompanying value will be escaped before inserting into the
+    -- database. This is the correct default choice to use.
+    --
+    -- @since 2.12.0.0
+    | Unescaped
+    -- ^ The accompanying value will not be escaped when inserting into the
+    -- database. This is potentially dangerous - use this with care.
+    --
+    -- @since 2.12.0.0
+    | DbSpecific
+    -- ^ The 'DbSpecific' constructor corresponds to the legacy
+    -- 'PersistDbSpecific' constructor. We need to keep this around because
+    -- old databases may have serialized JSON representations that
+    -- reference this. We don't want to break the ability of a database to
+    -- load rows.
+    --
+    -- @since 2.12.0.0
+    deriving (Show, Read, Eq, Ord)
+
+-- | This pattern synonym used to be a data constructor for the
+-- 'PersistValue' type. It was changed to be a pattern so that JSON-encoded
+-- database values could be parsed into their corresponding values. You
+-- should not use this, and instead prefer to pattern match on
+-- `PersistLiteral_` directly.
+--
+-- If you use this, it will overlap a pattern match on the 'PersistLiteral_',
+-- 'PersistLiteral', and 'PersistLiteralEscaped' patterns. If you need to
+-- disambiguate between these constructors, pattern match on
+-- 'PersistLiteral_' directly.
+--
+-- @since 2.12.0.0
+pattern PersistDbSpecific :: ByteString -> PersistValue
+pattern PersistDbSpecific bs <- PersistLiteral_ _ bs where
+    PersistDbSpecific bs = PersistLiteral_ DbSpecific bs
+
+-- | This pattern synonym used to be a data constructor on 'PersistValue',
+-- but was changed into a catch-all pattern synonym to allow backwards
+-- compatibility with database types. See the documentation on
+-- 'PersistDbSpecific' for more details.
+--
+-- @since 2.12.0.0
+pattern PersistLiteralEscaped :: ByteString -> PersistValue
+pattern PersistLiteralEscaped bs <- PersistLiteral_ _ bs where
+    PersistLiteralEscaped bs = PersistLiteral_ Escaped bs
+
+-- | This pattern synonym used to be a data constructor on 'PersistValue',
+-- but was changed into a catch-all pattern synonym to allow backwards
+-- compatibility with database types. See the documentation on
+-- 'PersistDbSpecific' for more details.
+--
+-- @since 2.12.0.0
+pattern PersistLiteral :: ByteString -> PersistValue
+pattern PersistLiteral bs <- PersistLiteral_ _ bs where
+    PersistLiteral bs = PersistLiteral_ Unescaped bs
+
+{-# DEPRECATED PersistDbSpecific "Deprecated since 2.11 because of inconsistent escaping behavior across backends. The Postgres backend escapes these values, while the MySQL backend does not. If you are using this, please switch to 'PersistLiteral_' and provide a relevant 'LiteralType' for your conversion."
 #-}
+
+instance ToHttpApiData PersistValue where
+    toUrlPiece val =
+        case fromPersistValueText val of
+            Left e -> error $ Text.unpack e
+            Right y -> y
+
+instance FromHttpApiData PersistValue where
+    parseUrlPiece input =
+          PersistInt64 <$> parseUrlPiece input
+      <!> PersistList <$> readTextData input
+      <!> PersistText <$> return input
+      where
+        infixl 3 <!>
+        Left _ <!> y = y
+        x <!> _ = x
+
+instance PathPiece PersistValue where
+    toPathPiece = toUrlPiece
+    fromPathPiece = parseUrlPieceMaybe
+
+fromPersistValueText :: PersistValue -> Either Text Text
+fromPersistValueText (PersistText s) = Right s
+fromPersistValueText (PersistByteString bs) =
+    Right $ TE.decodeUtf8With lenientDecode bs
+fromPersistValueText (PersistInt64 i) = Right $ Text.pack $ show i
+fromPersistValueText (PersistDouble d) = Right $ Text.pack $ show d
+fromPersistValueText (PersistRational r) = Right $ Text.pack $ show r
+fromPersistValueText (PersistDay d) = Right $ Text.pack $ show d
+fromPersistValueText (PersistTimeOfDay d) = Right $ Text.pack $ show d
+fromPersistValueText (PersistUTCTime d) = Right $ Text.pack $ show d
+fromPersistValueText PersistNull = Left "Unexpected null"
+fromPersistValueText (PersistBool b) = Right $ Text.pack $ show b
+fromPersistValueText (PersistList _) = Left "Cannot convert PersistList to Text"
+fromPersistValueText (PersistMap _) = Left "Cannot convert PersistMap to Text"
+fromPersistValueText (PersistObjectId _) = Left "Cannot convert PersistObjectId to Text"
+fromPersistValueText (PersistArray _) = Left "Cannot convert PersistArray to Text"
+fromPersistValueText (PersistLiteral_ _ _) = Left "Cannot convert PersistLiteral to Text"
+
+instance A.ToJSON PersistValue where
+    toJSON (PersistText t) = A.String $ Text.cons 's' t
+    toJSON (PersistByteString b) = A.String $ Text.cons 'b' $ TE.decodeUtf8 $ B64.encode b
+    toJSON (PersistInt64 i) = A.Number $ fromIntegral i
+    toJSON (PersistDouble d) = A.Number $ Data.Scientific.fromFloatDigits d
+    toJSON (PersistRational r) = A.String
$ Text.pack $ 'r' : show r + toJSON (PersistBool b) = A.Bool b + toJSON (PersistTimeOfDay t) = A.String $ Text.pack $ 't' : show t + toJSON (PersistUTCTime u) = A.String $ Text.pack $ 'u' : show u + toJSON (PersistDay d) = A.String $ Text.pack $ 'd' : show d + toJSON PersistNull = A.Null + toJSON (PersistList l) = A.Array $ V.fromList $ map A.toJSON l + toJSON (PersistMap m) = A.object $ map (second A.toJSON) m + toJSON (PersistLiteral_ litTy b) = + let encoded = TE.decodeUtf8 $ B64.encode b + prefix = + case litTy of + DbSpecific -> 'p' + Unescaped -> 'l' + Escaped -> 'e' + in + A.String $ Text.cons prefix encoded + toJSON (PersistArray a) = A.Array $ V.fromList $ map A.toJSON a + toJSON (PersistObjectId o) = + A.toJSON $ showChar 'o' $ showHexLen 8 (bs2i four) $ showHexLen 16 (bs2i eight) "" + where + (four, eight) = BS8.splitAt 4 o + + -- taken from crypto-api + bs2i :: ByteString -> Integer + bs2i bs = foldl' (\i b -> (i `shiftL` 8) + fromIntegral b) 0 bs + {-# INLINE bs2i #-} + + -- showHex of n padded with leading zeros if necessary to fill d digits + -- taken from Data.BSON + showHexLen :: (Show n, Integral n) => Int -> n -> ShowS + showHexLen d n = showString (replicate (d - sigDigits n) '0') . showHex n where + sigDigits 0 = 1 + sigDigits n' = truncate (logBase (16 :: Double) $ fromIntegral n') + 1 + +instance A.FromJSON PersistValue where + parseJSON (A.String t0) = + case Text.uncons t0 of + Nothing -> fail "Null string" + Just ('p', t) -> either (\_ -> fail "Invalid base64") (return . PersistDbSpecific) + $ B64.decode $ TE.encodeUtf8 t + Just ('l', t) -> either (\_ -> fail "Invalid base64") (return . PersistLiteral) + $ B64.decode $ TE.encodeUtf8 t + Just ('e', t) -> either (\_ -> fail "Invalid base64") (return . PersistLiteralEscaped) + $ B64.decode $ TE.encodeUtf8 t + Just ('s', t) -> return $ PersistText t + Just ('b', t) -> either (\_ -> fail "Invalid base64") (return . 
PersistByteString) + $ B64.decode $ TE.encodeUtf8 t + Just ('t', t) -> PersistTimeOfDay <$> readMay t + Just ('u', t) -> PersistUTCTime <$> readMay t + Just ('d', t) -> PersistDay <$> readMay t + Just ('r', t) -> PersistRational <$> readMay t + Just ('o', t) -> maybe + (fail "Invalid base64") + (return . PersistObjectId . i2bs (8 * 12) . fst) + $ headMay $ readHex $ Text.unpack t + Just (c, _) -> fail $ "Unknown prefix: " ++ [c] + where + headMay [] = Nothing + headMay (x:_) = Just x + readMay t = + case reads $ Text.unpack t of + (x, _):_ -> return x + [] -> fail "Could not read" + + -- taken from crypto-api + -- |@i2bs bitLen i@ converts @i@ to a 'ByteString' of @bitLen@ bits (must be a multiple of 8). + i2bs :: Int -> Integer -> ByteString + i2bs l i = BS.unfoldr (\l' -> if l' < 0 then Nothing else Just (fromIntegral (i `shiftR` l'), l' - 8)) (l-8) + {-# INLINE i2bs #-} + + + parseJSON (A.Number n) = return $ + if fromInteger (floor n) == n + then PersistInt64 $ floor n + else PersistDouble $ fromRational $ toRational n + parseJSON (A.Bool b) = return $ PersistBool b + parseJSON A.Null = return PersistNull + parseJSON (A.Array a) = fmap PersistList (mapM A.parseJSON $ V.toList a) + parseJSON (A.Object o) = + fmap PersistMap $ mapM go $ HM.toList o + where + go (k, v) = (,) k <$> A.parseJSON v + diff --git a/persistent/Database/Persist/Quasi.hs b/persistent/Database/Persist/Quasi.hs index 9c2e617e1..7bf538637 100644 --- a/persistent/Database/Persist/Quasi.hs +++ b/persistent/Database/Persist/Quasi.hs @@ -1,10 +1,8 @@ -{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE ViewPatterns #-} {-| This module defines the Persistent entity syntax used in the quasiquoter to generate persistent entities. 
@@ -415,822 +413,69 @@ Unfortunately, we can't use this to create Haddocks for you, because ))) -import Control.Arrow ((&&&)) -import Control.Monad (mplus, msum) -import Data.Char (isLower, isSpace, isUpper, toLower) -import Data.List (find, foldl') -import Data.List.NonEmpty (NonEmpty(..)) -import qualified Data.List.NonEmpty as NEL -import qualified Data.Map as M -import Data.Maybe (fromMaybe, listToMaybe, mapMaybe, maybeToList) -import Data.Monoid (mappend) -#if !MIN_VERSION_base(4,11,0) --- This can be removed when GHC < 8.2.2 isn't supported anymore -import Data.Semigroup ((<>)) -#endif import Data.Text (Text) -import qualified Data.Text as T -import Database.Persist.Types -import Text.Read (readEither) - -data ParseState a = PSDone | PSFail String | PSSuccess a Text deriving Show - -parseFieldType :: Text -> Either String FieldType -parseFieldType t0 = - case parseApplyFT t0 of - PSSuccess ft t' - | T.all isSpace t' -> Right ft - PSFail err -> Left $ "PSFail " ++ err - other -> Left $ show other - where - parseApplyFT t = - case goMany id t of - PSSuccess (ft:fts) t' -> PSSuccess (foldl' FTApp ft fts) t' - PSSuccess [] _ -> PSFail "empty" - PSFail err -> PSFail err - PSDone -> PSDone - - parseEnclosed :: Char -> (FieldType -> FieldType) -> Text -> ParseState FieldType - parseEnclosed end ftMod t = - let (a, b) = T.break (== end) t - in case parseApplyFT a of - PSSuccess ft t' -> case (T.dropWhile isSpace t', T.uncons b) of - ("", Just (c, t'')) | c == end -> PSSuccess (ftMod ft) (t'' `Data.Monoid.mappend` t') - (x, y) -> PSFail $ show (b, x, y) - x -> PSFail $ show x - - parse1 t = - case T.uncons t of - Nothing -> PSDone - Just (c, t') - | isSpace c -> parse1 $ T.dropWhile isSpace t' - | c == '(' -> parseEnclosed ')' id t' - | c == '[' -> parseEnclosed ']' FTList t' - | isUpper c || c == '\'' -> - let (a, b) = T.break (\x -> isSpace x || x `elem` ("()[]"::String)) t' - in PSSuccess (parseFieldTypePiece c a) b - | otherwise -> PSFail $ show (c, t') - - goMany 
front t = - case parse1 t of - PSSuccess x t' -> goMany (front . (x:)) t' - PSFail err -> PSFail err - PSDone -> PSSuccess (front []) t - -- _ -> - -parseFieldTypePiece :: Char -> Text -> FieldType -parseFieldTypePiece fstChar rest = - case fstChar of - '\'' -> - FTTypePromoted rest - _ -> - let t = T.cons fstChar rest - in case T.breakOnEnd "." t of - (_, "") -> FTTypeCon Nothing t - ("", _) -> FTTypeCon Nothing t - (a, b) -> FTTypeCon (Just $ T.init a) b - -data PersistSettings = PersistSettings - { psToDBName :: !(Text -> Text) - , psStrictFields :: !Bool - -- ^ Whether fields are by default strict. Default value: @True@. - -- - -- @since 1.2 - , psIdName :: !Text - -- ^ The name of the id column. Default value: @id@ - -- The name of the id column can also be changed on a per-model basis - -- - -- - -- @since 2.0 - } - -defaultPersistSettings, upperCaseSettings, lowerCaseSettings :: PersistSettings -defaultPersistSettings = PersistSettings - { psToDBName = id - , psStrictFields = True - , psIdName = "id" - } - -upperCaseSettings = defaultPersistSettings - -lowerCaseSettings = defaultPersistSettings - { psToDBName = - let go c - | isUpper c = T.pack ['_', toLower c] - | otherwise = T.singleton c - in T.dropWhile (== '_') . T.concatMap go - } - --- | Parses a quasi-quoted syntax into a list of entity definitions. -parse :: PersistSettings -> Text -> [EntityDef] -parse ps = maybe [] (parseLines ps) . preparse - -preparse :: Text -> Maybe (NonEmpty Line) -preparse txt = do - lns <- NEL.nonEmpty (T.lines txt) - NEL.nonEmpty $ mapMaybe parseLine (NEL.toList lns) - -parseLine :: Text -> Maybe Line -parseLine txt = do - Line (parseIndentationAmount txt) <$> NEL.nonEmpty (tokenize txt) - --- | A token used by the parser. -data Token = Token Text -- ^ @Token tok@ is token @tok@ already unquoted. - | DocComment Text -- ^ @DocComment@ is a documentation comment, unmodified. 
- deriving (Show, Eq) - -tokenText :: Token -> Text -tokenText tok = - case tok of - Token t -> t - DocComment t -> "-- | " <> t - -parseIndentationAmount :: Text -> Int -parseIndentationAmount txt = - let (spaces, _) = T.span isSpace txt - in T.length spaces - --- | Tokenize a string. -tokenize :: Text -> [Token] -tokenize t - | T.null t = [] - | Just txt <- T.stripPrefix "-- | " t = [DocComment txt] - | "--" `T.isPrefixOf` t = [] -- Comment until the end of the line. - | "#" `T.isPrefixOf` t = [] -- Also comment to the end of the line, needed for a CPP bug (#110) - | T.head t == '"' = quotes (T.tail t) id - | T.head t == '(' = parens 1 (T.tail t) id - | isSpace (T.head t) = - tokenize (T.dropWhile isSpace t) - - -- support mid-token quotes and parens - | Just (beforeEquals, afterEquals) <- findMidToken t - , not (T.any isSpace beforeEquals) - , Token next : rest <- tokenize afterEquals = - Token (T.concat [beforeEquals, "=", next]) : rest - - | otherwise = - let (token, rest) = T.break isSpace t - in Token token : tokenize rest - where - findMidToken t' = - case T.break (== '=') t' of - (x, T.drop 1 -> y) - | "\"" `T.isPrefixOf` y || "(" `T.isPrefixOf` y -> Just (x, y) - _ -> Nothing - - quotes t' front - | T.null t' = error $ T.unpack $ T.concat $ - "Unterminated quoted string starting with " : front [] - | T.head t' == '"' = Token (T.concat $ front []) : tokenize (T.tail t') - | T.head t' == '\\' && T.length t' > 1 = - quotes (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):)) - | otherwise = - let (x, y) = T.break (`elem` ['\\','\"']) t' - in quotes y (front . (x:)) - parens count t' front - | T.null t' = error $ T.unpack $ T.concat $ - "Unterminated parens string starting with " : front [] - | T.head t' == ')' = - if count == (1 :: Int) - then Token (T.concat $ front []) : tokenize (T.tail t') - else parens (count - 1) (T.tail t') (front . (")":)) - | T.head t' == '(' = - parens (count + 1) (T.tail t') (front . 
("(":)) - | T.head t' == '\\' && T.length t' > 1 = - parens count (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):)) - | otherwise = - let (x, y) = T.break (`elem` ['\\','(',')']) t' - in parens count y (front . (x:)) - --- | A line of parsed tokens -data Line = Line - { lineIndent :: Int - , tokens :: NonEmpty Token - } deriving (Eq, Show) - -lineText :: Line -> NonEmpty Text -lineText = fmap tokenText . tokens - -lowestIndent :: NonEmpty Line -> Int -lowestIndent = minimum . fmap lineIndent - --- | Divide lines into blocks and make entity definitions. -parseLines :: PersistSettings -> NonEmpty Line -> [EntityDef] -parseLines ps = - fixForeignKeysAll . map mk . associateLines - where - mk :: LinesWithComments -> UnboundEntityDef - mk lwc = - let ln :| rest = lwcLines lwc - (name :| entAttribs) = lineText ln - in setComments (lwcComments lwc) $ mkEntityDef ps name entAttribs rest - -isDocComment :: Token -> Maybe Text -isDocComment tok = - case tok of - DocComment txt -> Just txt - _ -> Nothing - -data LinesWithComments = LinesWithComments - { lwcLines :: NonEmpty Line - , lwcComments :: [Text] - } deriving (Eq, Show) - --- TODO: drop this and use <> when 8.2 isn't supported anymore so the --- monoid/semigroup nonsense isn't annoying -appendLwc :: LinesWithComments -> LinesWithComments -> LinesWithComments -appendLwc a b = - LinesWithComments (foldr NEL.cons (lwcLines b) (lwcLines a)) (lwcComments a `mappend` lwcComments b) - -newLine :: Line -> LinesWithComments -newLine l = LinesWithComments (pure l) [] - -firstLine :: LinesWithComments -> Line -firstLine = NEL.head . 
lwcLines - -consLine :: Line -> LinesWithComments -> LinesWithComments -consLine l lwc = lwc { lwcLines = NEL.cons l (lwcLines lwc) } - -consComment :: Text -> LinesWithComments -> LinesWithComments -consComment l lwc = lwc { lwcComments = l : lwcComments lwc } - -associateLines :: NonEmpty Line -> [LinesWithComments] -associateLines lines = - foldr combine [] $ - foldr toLinesWithComments [] lines - where - toLinesWithComments :: Line -> [LinesWithComments] -> [LinesWithComments] - toLinesWithComments line linesWithComments = - case linesWithComments of - [] -> - [newLine line] - (lwc : lwcs) -> - case isDocComment (NEL.head (tokens line)) of - Just comment - | lineIndent line == lowestIndent lines -> - consComment comment lwc : lwcs - _ -> - if lineIndent line <= lineIndent (firstLine lwc) - && lineIndent (firstLine lwc) /= lowestIndent lines - then - consLine line lwc : lwcs - else - newLine line : lwc : lwcs - - combine :: LinesWithComments -> [LinesWithComments] -> [LinesWithComments] - combine lwc [] = - [lwc] - combine lwc (lwc' : lwcs) = - let minIndent = minimumIndentOf lwc - otherIndent = minimumIndentOf lwc' - in - if minIndent < otherIndent then - appendLwc lwc lwc' : lwcs - else - lwc : lwc' : lwcs - - - minimumIndentOf = lowestIndent . 
lwcLines - -setComments :: [Text] -> UnboundEntityDef -> UnboundEntityDef -setComments [] = id -setComments comments = - overUnboundEntityDef (\ed -> ed { entityComments = Just (T.unlines comments) }) - -fixForeignKeysAll :: [UnboundEntityDef] -> [EntityDef] -fixForeignKeysAll unEnts = map fixForeignKeys unEnts - where - ents = map unboundEntityDef unEnts - entLookup = M.fromList $ map (\e -> (entityHaskell e, e)) ents - - fixForeignKeys :: UnboundEntityDef -> EntityDef - fixForeignKeys (UnboundEntityDef foreigns ent) = - ent { entityForeigns = map (fixForeignKey ent) foreigns } - - -- check the count and the sqltypes match and update the foreignFields with the names of the referenced columns - fixForeignKey :: EntityDef -> UnboundForeignDef -> ForeignDef - fixForeignKey ent (UnboundForeignDef foreignFieldTexts parentFieldTexts fdef) = - case mfdefs of - Just fdefs -> - if length foreignFieldTexts /= length fdefs - then - lengthError fdefs - else - let - fds_ffs = - zipWith toForeignFields - foreignFieldTexts - fdefs - dbname = - unEntityNameDB (entityDB pent) - oldDbName = - unEntityNameDB (foreignRefTableDBName fdef) - in fdef - { foreignFields = map snd fds_ffs - , foreignNullable = setNull $ map fst fds_ffs - , foreignRefTableDBName = - EntityNameDB dbname - , foreignConstraintNameDBName = - ConstraintNameDB - . T.replace oldDbName dbname . unConstraintNameDB - $ foreignConstraintNameDBName fdef - } - Nothing -> - error $ "no primary key found fdef="++show fdef++ " ent="++show ent - where - pentError = - error $ "could not find table " ++ show (foreignRefTableHaskell fdef) - ++ " fdef=" ++ show fdef ++ " allnames=" - ++ show (map (unEntityNameHS . entityHaskell . unboundEntityDef) unEnts) - ++ "\n\nents=" ++ show ents - pent = - fromMaybe pentError $ M.lookup (foreignRefTableHaskell fdef) entLookup - mfdefs = case parentFieldTexts of - [] -> entitiesPrimary pent - _ -> Just $ map (getFd pent . 
FieldNameHS) parentFieldTexts - - setNull :: [FieldDef] -> Bool - setNull [] = error "setNull: impossible!" - setNull (fd:fds) = let nullSetting = isNull fd in - if all ((nullSetting ==) . isNull) fds then nullSetting - else error $ "foreign key columns must all be nullable or non-nullable" - ++ show (map (unFieldNameHS . fieldHaskell) (fd:fds)) - isNull = (NotNullable /=) . nullable . fieldAttrs - - toForeignFields :: Text -> FieldDef - -> (FieldDef, (ForeignFieldDef, ForeignFieldDef)) - toForeignFields fieldText pfd = - case chktypes fd haskellField pfd of - Just err -> error err - Nothing -> (fd, ((haskellField, fieldDB fd), (pfh, pfdb))) - where - fd = getFd ent haskellField - - haskellField = FieldNameHS fieldText - (pfh, pfdb) = (fieldHaskell pfd, fieldDB pfd) - - chktypes ffld _fkey pfld = - if fieldType ffld == fieldType pfld then Nothing - else Just $ "fieldType mismatch: " ++ show (fieldType ffld) ++ ", " ++ show (fieldType pfld) - - getFd :: EntityDef -> FieldNameHS -> FieldDef - getFd entity t = go (keyAndEntityFields entity) - where - go [] = error $ "foreign key constraint for: " ++ show (unEntityNameHS $ entityHaskell entity) - ++ " unknown column: " ++ show t - go (f:fs) - | fieldHaskell f == t = f - | otherwise = go fs - - lengthError pdef = error $ "found " ++ show (length foreignFieldTexts) ++ " fkeys and " ++ show (length pdef) ++ " pkeys: fdef=" ++ show fdef ++ " pdef=" ++ show pdef - - -data UnboundEntityDef = UnboundEntityDef - { _unboundForeignDefs :: [UnboundForeignDef] - , unboundEntityDef :: EntityDef - } - -overUnboundEntityDef - :: (EntityDef -> EntityDef) -> UnboundEntityDef -> UnboundEntityDef -overUnboundEntityDef f ubed = - ubed { unboundEntityDef = f (unboundEntityDef ubed) } - -lookupKeyVal :: Text -> [Text] -> Maybe Text -lookupKeyVal key = lookupPrefix $ key `mappend` "=" - -lookupPrefix :: Text -> [Text] -> Maybe Text -lookupPrefix prefix = msum . map (T.stripPrefix prefix) - --- | Construct an entity definition. 
-mkEntityDef :: PersistSettings - -> Text -- ^ name - -> [Attr] -- ^ entity attributes - -> [Line] -- ^ indented lines - -> UnboundEntityDef -mkEntityDef ps name entattribs lines = - UnboundEntityDef foreigns $ - EntityDef - { entityHaskell = EntityNameHS name' - , entityDB = EntityNameDB $ getDbName ps name' entattribs - -- idField is the user-specified Id - -- otherwise useAutoIdField - -- but, adjust it if the user specified a Primary - , entityId = setComposite primaryComposite $ fromMaybe autoIdField idField - , entityAttrs = entattribs - , entityFields = cols - , entityUniques = uniqs - , entityForeigns = [] - , entityDerives = concat $ mapMaybe takeDerives textAttribs - , entityExtra = extras - , entitySum = isSum - , entityComments = Nothing - } - where - entName = EntityNameHS name' - (isSum, name') = - case T.uncons name of - Just ('+', x) -> (True, x) - _ -> (False, name) - (attribs, extras) = splitExtras lines - - textAttribs :: [[Text]] - textAttribs = - fmap tokenText <$> attribs - - attribPrefix = flip lookupKeyVal entattribs - idName | Just _ <- attribPrefix "id" = error "id= is deprecated, ad a field named 'Id' and use sql=" - | otherwise = Nothing - - (idField, primaryComposite, uniqs, foreigns) = foldl' (\(mid, mp, us, fs) attr -> - let (i, p, u, f) = takeConstraint ps name' cols attr - squish xs m = xs `mappend` maybeToList m - in (just1 mid i, just1 mp p, squish us u, squish fs f)) (Nothing, Nothing, [],[]) textAttribs - - cols :: [FieldDef] - cols = reverse . fst . 
foldr k ([], []) $ reverse attribs - - k x (!acc, !comments) = - case listToMaybe x of - Just (DocComment comment) -> - (acc, comment : comments) - _ -> - case (setFieldComments comments <$> takeColsEx ps (tokenText <$> x)) of - Just sm -> - (sm : acc, []) - Nothing -> - (acc, []) - - autoIdField = mkAutoIdField ps entName (FieldNameDB `fmap` idName) idSqlType - idSqlType = maybe SqlInt64 (const $ SqlOther "Primary Key") primaryComposite - - setComposite Nothing fd = fd - setComposite (Just c) fd = fd - { fieldReference = CompositeRef c - } - -setFieldComments :: [Text] -> FieldDef -> FieldDef -setFieldComments xs fld = - case xs of - [] -> fld - _ -> fld { fieldComments = Just (T.unlines xs) } - -just1 :: (Show x) => Maybe x -> Maybe x -> Maybe x -just1 (Just x) (Just y) = error $ "expected only one of: " - `mappend` show x `mappend` " " `mappend` show y -just1 x y = x `mplus` y - -mkAutoIdField :: PersistSettings -> EntityNameHS -> Maybe FieldNameDB -> SqlType -> FieldDef -mkAutoIdField ps entName idName idSqlType = - FieldDef - { fieldHaskell = FieldNameHS "Id" - -- this should be modeled as a Maybe - -- but that sucks for non-ID field - -- TODO: use a sumtype FieldDef | IdFieldDef - , fieldDB = fromMaybe (FieldNameDB $ psIdName ps) idName - , fieldType = FTTypeCon Nothing $ keyConName $ unEntityNameHS entName - , fieldSqlType = idSqlType - -- the primary field is actually a reference to the entity - , fieldReference = ForeignRef entName defaultReferenceTypeCon - , fieldAttrs = [] - , fieldStrict = True - , fieldComments = Nothing - , fieldCascade = noCascade - , fieldGenerated = Nothing - } - -defaultReferenceTypeCon :: FieldType -defaultReferenceTypeCon = FTTypeCon (Just "Data.Int") "Int64" - -keyConName :: Text -> Text -keyConName entName = entName `mappend` "Id" - -splitExtras - :: [Line] - -> ( [[Token]] - , M.Map Text [ExtraLine] - ) -splitExtras lns = - case lns of - [] -> ([], M.empty) - (line : rest) -> - case NEL.toList (tokens line) of - [Token name] 
- | isCapitalizedText name -> - let indent = lineIndent line - (children, rest') = span ((> indent) . lineIndent) rest - (x, y) = splitExtras rest' - in (x, M.insert name (NEL.toList . lineText <$> children) y) - ts -> - let (x, y) = splitExtras rest - in (ts:x, y) - -isCapitalizedText :: Text -> Bool -isCapitalizedText t = - not (T.null t) && isUpper (T.head t) - -takeColsEx :: PersistSettings -> [Text] -> Maybe FieldDef -takeColsEx = - takeCols - (\ft perr -> error $ "Invalid field type " ++ show ft ++ " " ++ perr) - -takeCols - :: (Text -> String -> Maybe FieldDef) - -> PersistSettings - -> [Text] - -> Maybe FieldDef -takeCols _ _ ("deriving":_) = Nothing -takeCols onErr ps (n':typ:rest') - | not (T.null n) && isLower (T.head n) = - case parseFieldType typ of - Left err -> onErr typ err - Right ft -> Just FieldDef - { fieldHaskell = FieldNameHS n - , fieldDB = FieldNameDB $ getDbName ps n attrs_ - , fieldType = ft - , fieldSqlType = SqlOther $ "SqlType unset for " `mappend` n - , fieldAttrs = fieldAttrs_ - , fieldStrict = fromMaybe (psStrictFields ps) mstrict - , fieldReference = NoReference - , fieldComments = Nothing - , fieldCascade = cascade_ - , fieldGenerated = generated_ - } - where - fieldAttrs_ = parseFieldAttrs attrs_ - generated_ = parseGenerated attrs_ - (cascade_, attrs_) = parseCascade rest' - (mstrict, n) - | Just x <- T.stripPrefix "!" 
n' = (Just True, x) - | Just x <- T.stripPrefix "~" n' = (Just False, x) - | otherwise = (Nothing, n') - -takeCols _ _ _ = Nothing - -parseGenerated :: [Text] -> Maybe Text -parseGenerated = foldl' (\acc x -> acc <|> T.stripPrefix "generated=" x) Nothing - -getDbName :: PersistSettings -> Text -> [Text] -> Text -getDbName ps n [] = psToDBName ps n -getDbName ps n (a:as) = fromMaybe (getDbName ps n as) $ T.stripPrefix "sql=" a - -takeConstraint :: PersistSettings - -> Text - -> [FieldDef] - -> [Text] - -> (Maybe FieldDef, Maybe CompositeDef, Maybe UniqueDef, Maybe UnboundForeignDef) -takeConstraint ps tableName defs (n:rest) | isCapitalizedText n = takeConstraint' - where - takeConstraint' - | n == "Unique" = (Nothing, Nothing, Just $ takeUniq ps tableName defs rest, Nothing) - | n == "Foreign" = (Nothing, Nothing, Nothing, Just $ takeForeign ps tableName defs rest) - | n == "Primary" = (Nothing, Just $ takeComposite defs rest, Nothing, Nothing) - | n == "Id" = (Just $ takeId ps tableName (n:rest), Nothing, Nothing, Nothing) - | otherwise = (Nothing, Nothing, Just $ takeUniq ps "" defs (n:rest), Nothing) -- retain compatibility with original unique constraint -takeConstraint _ _ _ _ = (Nothing, Nothing, Nothing, Nothing) - --- TODO: this is hacky (the double takeCols, the setFieldDef stuff, and setIdName. 
--- need to re-work takeCols function -takeId :: PersistSettings -> Text -> [Text] -> FieldDef -takeId ps tableName (n:rest) = - setFieldDef - $ fromMaybe (error "takeId: impossible!") - $ takeCols (\_ _ -> addDefaultIdType) ps (field:rest) -- `mappend` setIdName) - where - field = case T.uncons n of - Nothing -> error "takeId: empty field" - Just (f, ield) -> toLower f `T.cons` ield - addDefaultIdType = takeColsEx ps (field : keyCon : rest ) -- `mappend` setIdName) - setFieldDef fd = fd - { fieldReference = - ForeignRef (EntityNameHS tableName) $ - if fieldType fd == FTTypeCon Nothing keyCon - then defaultReferenceTypeCon - else fieldType fd - } - keyCon = keyConName tableName - -- this will be ignored if there is already an existing sql= - -- TODO: I think there is a ! ignore syntax that would screw this up - -- setIdName = ["sql=" `mappend` psIdName ps] -takeId _ tableName _ = error $ "empty Id field for " `mappend` show tableName - - -takeComposite - :: [FieldDef] - -> [Text] - -> CompositeDef -takeComposite fields pkcols = - CompositeDef (map (getDef fields) pkcols) attrs - where - (_, attrs) = break ("!" `T.isPrefixOf`) pkcols - getDef [] t = error $ "Unknown column in primary key constraint: " ++ show t - getDef (d:ds) t - | fieldHaskell d == FieldNameHS t = - if nullable (fieldAttrs d) /= NotNullable - then error $ "primary key column cannot be nullable: " ++ show t ++ show fields - else d - | otherwise = getDef ds t - --- Unique UppercaseConstraintName list of lowercasefields terminated --- by ! or sql= such that a unique constraint can look like: --- `UniqueTestNull fieldA fieldB sql=ConstraintNameInDatabase !force` --- Here using sql= sets the name of the constraint. -takeUniq :: PersistSettings - -> Text - -> [FieldDef] - -> [Text] - -> UniqueDef -takeUniq ps tableName defs (n:rest) - | isCapitalizedText n - = UniqueDef - (ConstraintNameHS n) - dbName - (map (FieldNameHS &&& getDBName defs) fields) - attrs - where - isAttr a = - "!" 
`T.isPrefixOf` a - isSqlName a = - "sql=" `T.isPrefixOf` a - isNonField a = - isAttr a - || isSqlName a - (fields, nonFields) = - break isNonField rest - attrs = filter isAttr nonFields - usualDbName = - ConstraintNameDB $ psToDBName ps (tableName `T.append` n) - sqlName :: Maybe ConstraintNameDB - sqlName = - case find isSqlName nonFields of - Nothing -> - Nothing - (Just t) -> - case drop 1 $ T.splitOn "=" t of - (x : _) -> Just (ConstraintNameDB x) - _ -> Nothing - dbName = fromMaybe usualDbName sqlName - getDBName [] t = - error $ "Unknown column in unique constraint: " ++ show t - ++ " " ++ show defs ++ show n ++ " " ++ show attrs - getDBName (d:ds) t - | fieldHaskell d == FieldNameHS t = fieldDB d - | otherwise = getDBName ds t -takeUniq _ tableName _ xs = - error $ "invalid unique constraint on table[" - ++ show tableName - ++ "] expecting an uppercase constraint name xs=" - ++ show xs - -data UnboundForeignDef = UnboundForeignDef - { _unboundForeignFields :: [Text] -- ^ fields in the parent entity - , _unboundParentFields :: [Text] -- ^ fields in parent entity - , _unboundForeignDef :: ForeignDef - } - -takeForeign - :: PersistSettings - -> Text - -> [FieldDef] - -> [Text] - -> UnboundForeignDef -takeForeign ps tableName _defs = takeRefTable - where - errorPrefix :: String - errorPrefix = "invalid foreign key constraint on table[" ++ show tableName ++ "] " - - takeRefTable :: [Text] -> UnboundForeignDef - takeRefTable [] = error $ errorPrefix ++ " expecting foreign table name" - takeRefTable (refTableName:restLine) = go restLine Nothing Nothing - where - go :: [Text] -> Maybe CascadeAction -> Maybe CascadeAction -> UnboundForeignDef - go (n:rest) onDelete onUpdate | not (T.null n) && isLower (T.head n) - = UnboundForeignDef fFields pFields $ ForeignDef - { foreignRefTableHaskell = - EntityNameHS refTableName - , foreignRefTableDBName = - EntityNameDB $ psToDBName ps refTableName - , foreignConstraintNameHaskell = - ConstraintNameHS n - , 
foreignConstraintNameDBName = - ConstraintNameDB $ psToDBName ps (tableName `T.append` n) - , foreignFieldCascade = FieldCascade - { fcOnDelete = onDelete - , fcOnUpdate = onUpdate - } - , foreignFields = - [] - , foreignAttrs = - attrs - , foreignNullable = - False - , foreignToPrimary = - null pFields - } - where - (fields,attrs) = break ("!" `T.isPrefixOf`) rest - (fFields, pFields) = case break (== "References") fields of - (ffs, []) -> (ffs, []) - (ffs, _ : pfs) -> case (length ffs, length pfs) of - (flen, plen) | flen == plen -> (ffs, pfs) - (flen, plen) -> error $ errorPrefix ++ concat - [ "Found ", show flen, " foreign fields but " - , show plen, " parent fields" ] - - go ((parseCascadeAction CascadeDelete -> Just cascadingAction) : rest) onDelete' onUpdate = - case onDelete' of - Nothing -> - go rest (Just cascadingAction) onUpdate - Just _ -> - error $ errorPrefix ++ "found more than one OnDelete actions" - - go ((parseCascadeAction CascadeUpdate -> Just cascadingAction) : rest) onDelete onUpdate' = - case onUpdate' of - Nothing -> - go rest onDelete (Just cascadingAction) - Just _ -> - error $ errorPrefix ++ "found more than one OnUpdate actions" - - go xs _ _ = error $ errorPrefix ++ "expecting a lower case constraint name or a cascading action xs=" ++ show xs - -data CascadePrefix = CascadeUpdate | CascadeDelete - -parseCascade :: [Text] -> (FieldCascade, [Text]) -parseCascade allTokens = - go [] Nothing Nothing allTokens - where - go acc mupd mdel tokens_ = - case tokens_ of - [] -> - ( FieldCascade - { fcOnDelete = mdel - , fcOnUpdate = mupd - } - , acc - ) - this : rest -> - case parseCascadeAction CascadeUpdate this of - Just cascUpd -> - case mupd of - Nothing -> - go acc (Just cascUpd) mdel rest - Just _ -> - nope "found more than one OnUpdate action" - Nothing -> - case parseCascadeAction CascadeDelete this of - Just cascDel -> - case mdel of - Nothing -> - go acc mupd (Just cascDel) rest - Just _ -> - nope "found more than one OnDelete action: 
" - Nothing -> - go (this : acc) mupd mdel rest - nope msg = - error $ msg <> ", tokens: " <> show allTokens - -parseCascadeAction - :: CascadePrefix - -> Text - -> Maybe CascadeAction -parseCascadeAction prfx text = do - cascadeStr <- T.stripPrefix ("On" <> toPrefix prfx) text - case readEither (T.unpack cascadeStr) of - Right a -> - Just a - Left _ -> - Nothing - where - toPrefix cp = - case cp of - CascadeUpdate -> "Update" - CascadeDelete -> "Delete" - -takeDerives :: [Text] -> Maybe [Text] -takeDerives ("deriving":rest) = Just rest -takeDerives _ = Nothing - -nullable :: [FieldAttr] -> IsNullable -nullable s - | FieldAttrMaybe `elem` s = Nullable ByMaybeAttr - | FieldAttrNullable `elem` s = Nullable ByNullableAttr - | otherwise = NotNullable +import Database.Persist.Names +import Database.Persist.Quasi.Internal + +-- | Retrieve the function in the 'PersistSettings' that modifies the names into +-- database names. +-- +-- @since 2.13.0.0 +getPsToDBName :: PersistSettings -> Text -> Text +getPsToDBName = psToDBName + +-- | Set the name modification function that translates the QuasiQuoted names +-- for use in the database. +-- +-- @since 2.13.0.0 +setPsToDBName :: (Text -> Text) -> PersistSettings -> PersistSettings +setPsToDBName f ps = ps { psToDBName = f } + +-- | Set a custom function used to create the constraint name +-- for a foreign key. +-- +-- @since 2.13.0.0 +setPsToFKName :: (EntityNameHS -> ConstraintNameHS -> Text) -> PersistSettings -> PersistSettings +setPsToFKName setter ps = ps { psToFKName = setter } + +-- | A preset configuration function that puts an underscore +-- between the entity name and the constraint name when +-- creating a foreign key constraint name +-- +-- @since 2.13.0.0 +setPsUseSnakeCaseForiegnKeys :: PersistSettings -> PersistSettings +setPsUseSnakeCaseForiegnKeys = setPsToFKName (toFKNameInfixed "_") + +-- | Retrieve whether or not the 'PersistSettings' will generate code with +-- strict fields. 
+-- +-- @since 2.13.0.0 +getPsStrictFields :: PersistSettings -> Bool +getPsStrictFields = psStrictFields + +-- | Set whether or not the 'PersistSettings' will make fields strict. +-- +-- @since 2.13.0.0 +setPsStrictFields :: Bool -> PersistSettings -> PersistSettings +setPsStrictFields a ps = ps { psStrictFields = a } + +-- | Retrievce the default name of the @id@ column. +-- +-- @since 2.13.0.0 +getPsIdName :: PersistSettings -> Text +getPsIdName = psIdName + +-- | Set the default name of the @id@ column. +-- +-- @since 2.13.0.0 +setPsIdName :: Text -> PersistSettings -> PersistSettings +setPsIdName n ps = ps { psIdName = n } diff --git a/persistent/Database/Persist/Quasi/Internal.hs b/persistent/Database/Persist/Quasi/Internal.hs new file mode 100644 index 000000000..299a0cc04 --- /dev/null +++ b/persistent/Database/Persist/Quasi/Internal.hs @@ -0,0 +1,1383 @@ +{-# LANGUAGE BangPatterns #-} +{-# LANGUAGE DeriveLift #-} +{-# LANGUAGE PatternGuards #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE StrictData #-} +{-# LANGUAGE UndecidableInstances #-} +{-# LANGUAGE ViewPatterns #-} + +-- | This @Internal@ module may have breaking changes that will not be reflected +-- in major version bumps. Please use "Database.Persist.Quasi" instead. If you +-- need something in this module, please file an issue on GitHub. +-- +-- @since 2.13.0.0 +module Database.Persist.Quasi.Internal + ( parse + , PersistSettings (..) + , upperCaseSettings + , lowerCaseSettings + , toFKNameInfixed + , nullable + , Token (..) + , Line (..) + , preparse + , parseLine + , parseFieldType + , associateLines + , LinesWithComments(..) + , splitExtras + , takeColsEx + -- * UnboundEntityDef + , UnboundEntityDef(..) + , getUnboundEntityNameHS + , unbindEntityDef + , getUnboundFieldDefs + , UnboundForeignDef(..) + , getSqlNameOr + , UnboundFieldDef(..) + , UnboundCompositeDef(..) + , UnboundIdDef(..) 
+ , unbindFieldDef + , unboundIdDefToFieldDef + , PrimarySpec(..) + , mkAutoIdField' + , UnboundForeignFieldList(..) + , ForeignFieldReference(..) + , mkKeyConType + , isHaskellUnboundField + ) where + +import Prelude hiding (lines) + +import Control.Applicative (Alternative((<|>))) +import Control.Monad (mplus) +import Data.Char (isLower, isSpace, isUpper, toLower) +import Data.List (find, foldl') +import Data.List.NonEmpty (NonEmpty(..)) +import qualified Data.List.NonEmpty as NEL +import qualified Data.Map as M +import Data.Maybe (fromMaybe, listToMaybe, mapMaybe, maybeToList) +import Data.Monoid (mappend) +import Data.Text (Text) +import qualified Data.Text as T +import Database.Persist.EntityDef.Internal +import Database.Persist.Types +import Language.Haskell.TH.Syntax (Lift) +import Text.Read (readEither) + +data ParseState a = PSDone | PSFail String | PSSuccess a Text deriving Show + +parseFieldType :: Text -> Either String FieldType +parseFieldType t0 = + case parseApplyFT t0 of + PSSuccess ft t' + | T.all isSpace t' -> Right ft + PSFail err -> Left $ "PSFail " ++ err + other -> Left $ show other + where + parseApplyFT t = + case goMany id t of + PSSuccess (ft:fts) t' -> PSSuccess (foldl' FTApp ft fts) t' + PSSuccess [] _ -> PSFail "empty" + PSFail err -> PSFail err + PSDone -> PSDone + + parseEnclosed :: Char -> (FieldType -> FieldType) -> Text -> ParseState FieldType + parseEnclosed end ftMod t = + let (a, b) = T.break (== end) t + in case parseApplyFT a of + PSSuccess ft t' -> case (T.dropWhile isSpace t', T.uncons b) of + ("", Just (c, t'')) | c == end -> PSSuccess (ftMod ft) (t'' `Data.Monoid.mappend` t') + (x, y) -> PSFail $ show (b, x, y) + x -> PSFail $ show x + + parse1 t = + case T.uncons t of + Nothing -> PSDone + Just (c, t') + | isSpace c -> parse1 $ T.dropWhile isSpace t' + | c == '(' -> parseEnclosed ')' id t' + | c == '[' -> parseEnclosed ']' FTList t' + | isUpper c || c == '\'' -> + let (a, b) = T.break (\x -> isSpace x || x `elem` 
("()[]"::String)) t' + in PSSuccess (parseFieldTypePiece c a) b + | otherwise -> PSFail $ show (c, t') + + goMany front t = + case parse1 t of + PSSuccess x t' -> goMany (front . (x:)) t' + PSFail err -> PSFail err + PSDone -> PSSuccess (front []) t + -- _ -> + +parseFieldTypePiece :: Char -> Text -> FieldType +parseFieldTypePiece fstChar rest = + case fstChar of + '\'' -> + FTTypePromoted rest + _ -> + let t = T.cons fstChar rest + in case T.breakOnEnd "." t of + (_, "") -> FTTypeCon Nothing t + ("", _) -> FTTypeCon Nothing t + (a, b) -> FTTypeCon (Just $ T.init a) b + +data PersistSettings = PersistSettings + { psToDBName :: !(Text -> Text) + -- ^ Modify the Haskell-style name into a database-style name. + , psToFKName :: !(EntityNameHS -> ConstraintNameHS -> Text) + -- ^ A function for generating the constraint name, with access to + -- the entity and constraint names. Default value: @mappend@ + -- + -- @since 2.13.0.0 + , psStrictFields :: !Bool + -- ^ Whether fields are by default strict. Default value: @True@. + -- + -- @since 1.2 + , psIdName :: !Text + -- ^ The name of the id column. Default value: @id@ + -- The name of the id column can also be changed on a per-model basis + -- + -- + -- @since 2.0 + } + +defaultPersistSettings, upperCaseSettings, lowerCaseSettings :: PersistSettings +defaultPersistSettings = PersistSettings + { psToDBName = id + , psToFKName = \(EntityNameHS entName) (ConstraintNameHS conName) -> entName <> conName + , psStrictFields = True + , psIdName = "id" + } + +upperCaseSettings = defaultPersistSettings + +lowerCaseSettings = defaultPersistSettings + { psToDBName = + let go c + | isUpper c = T.pack ['_', toLower c] + | otherwise = T.singleton c + in T.dropWhile (== '_') . T.concatMap go + } + +toFKNameInfixed :: Text -> EntityNameHS -> ConstraintNameHS -> Text +toFKNameInfixed inf (EntityNameHS entName) (ConstraintNameHS conName) = + entName <> inf <> conName + +-- | Parses a quasi-quoted syntax into a list of entity definitions. 
+parse :: PersistSettings -> Text -> [UnboundEntityDef] +parse ps = maybe [] (parseLines ps) . preparse + +preparse :: Text -> Maybe (NonEmpty Line) +preparse txt = do + lns <- NEL.nonEmpty (T.lines txt) + NEL.nonEmpty $ mapMaybe parseLine (NEL.toList lns) + +parseLine :: Text -> Maybe Line +parseLine txt = do + Line (parseIndentationAmount txt) <$> NEL.nonEmpty (tokenize txt) + +-- | A token used by the parser. +data Token = Token Text -- ^ @Token tok@ is token @tok@ already unquoted. + | DocComment Text -- ^ @DocComment@ is a documentation comment, unmodified. + deriving (Show, Eq) + +tokenText :: Token -> Text +tokenText tok = + case tok of + Token t -> t + DocComment t -> "-- | " <> t + +parseIndentationAmount :: Text -> Int +parseIndentationAmount txt = + let (spaces, _) = T.span isSpace txt + in T.length spaces + +-- | Tokenize a string. +tokenize :: Text -> [Token] +tokenize t + | T.null t = [] + | Just txt <- T.stripPrefix "-- | " t = [DocComment txt] + | "--" `T.isPrefixOf` t = [] -- Comment until the end of the line. 
+ | "#" `T.isPrefixOf` t = [] -- Also comment to the end of the line, needed for a CPP bug (#110) + | T.head t == '"' = quotes (T.tail t) id + | T.head t == '(' = parens 1 (T.tail t) id + | isSpace (T.head t) = + tokenize (T.dropWhile isSpace t) + + -- support mid-token quotes and parens + | Just (beforeEquals, afterEquals) <- findMidToken t + , not (T.any isSpace beforeEquals) + , Token next : rest <- tokenize afterEquals = + Token (T.concat [beforeEquals, "=", next]) : rest + + | otherwise = + let (token, rest) = T.break isSpace t + in Token token : tokenize rest + where + findMidToken t' = + case T.break (== '=') t' of + (x, T.drop 1 -> y) + | "\"" `T.isPrefixOf` y || "(" `T.isPrefixOf` y -> Just (x, y) + _ -> Nothing + + quotes t' front + | T.null t' = error $ T.unpack $ T.concat $ + "Unterminated quoted string starting with " : front [] + | T.head t' == '"' = Token (T.concat $ front []) : tokenize (T.tail t') + | T.head t' == '\\' && T.length t' > 1 = + quotes (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):)) + | otherwise = + let (x, y) = T.break (`elem` ['\\','\"']) t' + in quotes y (front . (x:)) + parens count t' front + | T.null t' = error $ T.unpack $ T.concat $ + "Unterminated parens string starting with " : front [] + | T.head t' == ')' = + if count == (1 :: Int) + then Token (T.concat $ front []) : tokenize (T.tail t') + else parens (count - 1) (T.tail t') (front . (")":)) + | T.head t' == '(' = + parens (count + 1) (T.tail t') (front . ("(":)) + | T.head t' == '\\' && T.length t' > 1 = + parens count (T.drop 2 t') (front . (T.take 1 (T.drop 1 t'):)) + | otherwise = + let (x, y) = T.break (`elem` ['\\','(',')']) t' + in parens count y (front . (x:)) + +-- | A line of parsed tokens +data Line = Line + { lineIndent :: Int + , tokens :: NonEmpty Token + } deriving (Eq, Show) + +lineText :: Line -> NonEmpty Text +lineText = fmap tokenText . tokens + +lowestIndent :: NonEmpty Line -> Int +lowestIndent = minimum . 
fmap lineIndent + +-- | Divide lines into blocks and make entity definitions. +parseLines :: PersistSettings -> NonEmpty Line -> [UnboundEntityDef] +parseLines ps = do + fmap (mkUnboundEntityDef ps . toParsedEntityDef) . associateLines + +data ParsedEntityDef = ParsedEntityDef + { parsedEntityDefComments :: [Text] + , parsedEntityDefEntityName :: EntityNameHS + , parsedEntityDefIsSum :: Bool + , parsedEntityDefEntityAttributes :: [Attr] + , parsedEntityDefFieldAttributes :: [[Token]] + , parsedEntityDefExtras :: M.Map Text [ExtraLine] + } + +entityNamesFromParsedDef :: PersistSettings -> ParsedEntityDef -> (EntityNameHS, EntityNameDB) +entityNamesFromParsedDef ps parsedEntDef = (entNameHS, entNameDB) + where + entNameHS = + parsedEntityDefEntityName parsedEntDef + + entNameDB = + EntityNameDB $ getDbName ps (unEntityNameHS entNameHS) (parsedEntityDefEntityAttributes parsedEntDef) + +toParsedEntityDef :: LinesWithComments -> ParsedEntityDef +toParsedEntityDef lwc = ParsedEntityDef + { parsedEntityDefComments = lwcComments lwc + , parsedEntityDefEntityName = entNameHS + , parsedEntityDefIsSum = isSum + , parsedEntityDefEntityAttributes = entAttribs + , parsedEntityDefFieldAttributes = attribs + , parsedEntityDefExtras = extras + } + where + entityLine :| fieldLines = + lwcLines lwc + + (entityName :| entAttribs) = + lineText entityLine + + (isSum, entNameHS) = + case T.uncons entityName of + Just ('+', x) -> (True, EntityNameHS x) + _ -> (False, EntityNameHS entityName) + + (attribs, extras) = + splitExtras fieldLines + +isDocComment :: Token -> Maybe Text +isDocComment tok = + case tok of + DocComment txt -> Just txt + _ -> Nothing + +data LinesWithComments = LinesWithComments + { lwcLines :: NonEmpty Line + , lwcComments :: [Text] + } deriving (Eq, Show) + +instance Semigroup LinesWithComments where + a <> b = + LinesWithComments + { lwcLines = + foldr NEL.cons (lwcLines b) (lwcLines a) + , lwcComments = + lwcComments a `mappend` lwcComments b + } + +appendLwc :: 
LinesWithComments -> LinesWithComments -> LinesWithComments +appendLwc = (<>) + +newLine :: Line -> LinesWithComments +newLine l = LinesWithComments (pure l) [] + +firstLine :: LinesWithComments -> Line +firstLine = NEL.head . lwcLines + +consLine :: Line -> LinesWithComments -> LinesWithComments +consLine l lwc = lwc { lwcLines = NEL.cons l (lwcLines lwc) } + +consComment :: Text -> LinesWithComments -> LinesWithComments +consComment l lwc = lwc { lwcComments = l : lwcComments lwc } + +associateLines :: NonEmpty Line -> [LinesWithComments] +associateLines lines = + foldr combine [] $ + foldr toLinesWithComments [] lines + where + toLinesWithComments :: Line -> [LinesWithComments] -> [LinesWithComments] + toLinesWithComments line linesWithComments = + case linesWithComments of + [] -> + [newLine line] + (lwc : lwcs) -> + case isDocComment (NEL.head (tokens line)) of + Just comment + | lineIndent line == lowestIndent lines -> + consComment comment lwc : lwcs + _ -> + if lineIndent line <= lineIndent (firstLine lwc) + && lineIndent (firstLine lwc) /= lowestIndent lines + then + consLine line lwc : lwcs + else + newLine line : lwc : lwcs + + combine :: LinesWithComments -> [LinesWithComments] -> [LinesWithComments] + combine lwc [] = + [lwc] + combine lwc (lwc' : lwcs) = + let minIndent = minimumIndentOf lwc + otherIndent = minimumIndentOf lwc' + in + if minIndent < otherIndent then + appendLwc lwc lwc' : lwcs + else + lwc : lwc' : lwcs + + + minimumIndentOf = lowestIndent . lwcLines + +-- | An 'EntityDef' produced by the QuasiQuoter. It contains information that +-- the QuasiQuoter is capable of knowing about the entities. It is inherently +-- unfinished, though - there are many other @Unbound@ datatypes that also +-- contain partial information. +-- +-- The 'unboundEntityDef' is not complete or reliable - to know which fields are +-- safe to use, consult the parsing code. 
+-- +-- This type was completely internal until 2.13.0.0, when it was exposed as part +-- of the "Database.Persist.Quasi.Internal" module. +-- +-- TODO: refactor this so we can expose it for consumers. +-- +-- @since 2.13.0.0 +data UnboundEntityDef + = UnboundEntityDef + { unboundForeignDefs :: [UnboundForeignDef] + -- ^ A list of foreign definitions on the parsed entity. + -- + -- @since 2.13.0.0 + , unboundPrimarySpec :: PrimarySpec + -- ^ The specification for the primary key of the unbound entity. + -- + -- @since 2.13.0.0 + , unboundEntityDef :: EntityDef + -- ^ The incomplete and partial 'EntityDef' that we're defining. We re-use + -- the type here to prevent duplication, but several of the fields are unset + -- and left to defaults. + -- + -- @since 2.13.0.0 + , unboundEntityFields :: [UnboundFieldDef] + -- ^ The list of fields for the entity. We're not capable of knowing + -- information like "is this a reference?" or "what's the underlying type of + -- the field?" yet, so we defer those to the Template Haskell execution. + -- + -- @since 2.13.0.0 + } + deriving (Show, Lift) + +-- | Convert an 'EntityDef' into an 'UnboundEntityDef'. This "forgets" +-- information about the 'EntityDef', but it is all kept present on the +-- 'unboundEntityDef' field if necessary. +-- +-- @since 2.13.0.0 +unbindEntityDef :: EntityDef -> UnboundEntityDef +unbindEntityDef ed = + UnboundEntityDef + { unboundForeignDefs = + map unbindForeignDef (entityForeigns ed) + , unboundPrimarySpec = + case entityId ed of + EntityIdField fd -> + SurrogateKey (unbindIdDef (entityHaskell ed) fd) + EntityIdNaturalKey cd -> + NaturalKey (unbindCompositeDef cd) + , unboundEntityDef = + ed + , unboundEntityFields = + map unbindFieldDef (entityFields ed) + } + +-- | Returns the @['UnboundFieldDef']@ for an 'UnboundEntityDef'. This returns +-- all fields defined on the entity. 
+-- +-- @since 2.13.0.0 +getUnboundFieldDefs :: UnboundEntityDef -> [UnboundFieldDef] +getUnboundFieldDefs = unboundEntityFields + +-- | This function forgets information about the 'CompositeDef' so that it can +-- be remembered through Template Haskell. +-- +-- @since 2.13.0.0 +unbindCompositeDef :: CompositeDef -> UnboundCompositeDef +unbindCompositeDef cd = + UnboundCompositeDef + { unboundCompositeCols = + NEL.toList $ fmap fieldHaskell (compositeFields cd) + , unboundCompositeAttrs = + compositeAttrs cd + } + +-- | A representation of a database column, with everything that can be known at +-- parse time. +-- +-- @since 2.13.0.0 +data UnboundFieldDef + = UnboundFieldDef + { unboundFieldNameHS :: FieldNameHS + -- ^ The Haskell name of the field. This is parsed directly from the + -- definition, and is used to generate the Haskell record field and the + -- 'EntityField' definition. + -- + -- @since 2.13.0.0 + , unboundFieldNameDB :: FieldNameDB + -- ^ The database name of the field. By default, this is determined by the + -- 'PersistSettings' record at parse time. You can customize this with + -- a @sql=@ attribute: + -- + -- @ + -- name Text sql=foo_name + -- @ + -- + -- @since 2.13.0.0 + , unboundFieldAttrs :: [FieldAttr] + -- ^ The attributes present on the field. For rules on parsing and utility, + -- see the comments on the datatype. + -- + -- @since 2.13.0.0 + , unboundFieldStrict :: Bool + -- ^ Whether or not the field should be strict in the generated Haskell + -- code. + -- + -- @since 2.13.0.0 + , unboundFieldType :: FieldType + -- ^ The type of the field, as far as is known at parse time. + -- + -- The TemplateHaskell code will reconstruct a 'Type' out of this, but the + -- names will be imported as-is. + -- + -- @since 2.13.0.0 + , unboundFieldCascade :: FieldCascade + -- ^ We parse if there's a 'FieldCascade' on the field. If the field is not + -- a reference, this information is ignored. 
+ -- 
+ -- @ 
+ -- Post 
+ -- user UserId OnDeleteCascade 
+ -- @ 
+ -- 
+ -- @since 2.13.0.0 
+ , unboundFieldGenerated :: Maybe Text 
+ -- ^ Contains an expression to generate the column. If this is present, then 
+ -- the column will not be written to the database, but generated by the 
+ -- expression every time. 
+ -- 
+ -- @ 
+ -- Item 
+ -- subtotal Int 
+ -- taxRate Rational 
+ -- total Int generated="subtotal * tax_rate" 
+ -- @ 
+ -- 
+ -- @since 2.13.0.0 
+ , unboundFieldComments :: Maybe Text 
+ -- ^ Any comments present on the field. Documentation comments use 
+ -- a Haskell-like syntax, and must be present before the field in question. 
+ -- 
+ -- @ 
+ -- Post 
+ -- -- | This is the blog post title. 
+ -- title Text 
+ -- -- | You can have multi-line comments. 
+ -- -- | But each line must have the pipe character. 
+ -- author UserId 
+ -- @ 
+ -- 
+ -- @since 2.13.0.0 
+ } 
+ deriving (Eq, Show, Lift) 
+ 
+-- | Forget information about a 'FieldDef' so it can be used as an 
+-- 'UnboundFieldDef'. 
+-- 
+-- @since 2.13.0.0 
+unbindFieldDef :: FieldDef -> UnboundFieldDef 
+unbindFieldDef fd = UnboundFieldDef 
+ { unboundFieldNameHS = 
+ fieldHaskell fd 
+ , unboundFieldNameDB = 
+ fieldDB fd 
+ , unboundFieldAttrs = 
+ fieldAttrs fd 
+ , unboundFieldType = 
+ fieldType fd 
+ , unboundFieldStrict = 
+ fieldStrict fd 
+ , unboundFieldCascade = 
+ fieldCascade fd 
+ , unboundFieldComments = 
+ fieldComments fd 
+ , unboundFieldGenerated = 
+ fieldGenerated fd 
+ } 
+ 
+-- | The specification for how an entity's primary key should be formed. 
+-- 
+-- Persistent requires that every table have a primary key. By default, an 
+-- implied ID is assigned, based on the 'mpsImplicitIdDef' field on 
+-- 'MkPersistSettings'. Because we can't access that type at parse-time, we 
+-- defer that decision until later. 
+-- 
+-- @since 2.13.0.0 
+data PrimarySpec 
+ = NaturalKey UnboundCompositeDef 
+ -- ^ A 'NaturalKey' contains columns that are defined on the datatype 
+ -- itself.
This is defined using the @Primary@ keyword and given a non-empty 
+ -- list of columns. 
+ -- 
+ -- @ 
+ -- User 
+ -- name Text 
+ -- email Text 
+ -- 
+ -- Primary name email 
+ -- @ 
+ -- 
+ -- A natural key may also contain only a single column. A natural key with 
+ -- multiple columns is called a 'composite key'. 
+ -- 
+ -- @since 2.13.0.0 
+ | SurrogateKey UnboundIdDef 
+ -- ^ A surrogate key is not part of the domain model for a database table. 
+ -- 
+ -- You can specify a custom surrogate key using the @Id@ syntax. 
+ -- 
+ -- @ 
+ -- User 
+ -- Id Text 
+ -- name Text 
+ -- @ 
+ -- 
+ -- Note that you must provide a @default=@ expression when using this in 
+ -- order to use 'insert' or related functions. The 'insertKey' function can 
+ -- be used instead, as it allows you to specify a key directly. Fixing this 
+ -- issue is tracked in #1247 on GitHub. 
+ -- 
+ -- @since 2.13.0.0 
+ | DefaultKey FieldNameDB 
+ -- ^ The default key for the entity using the settings in 
+ -- 'MkPersistSettings'. 
+ -- 
+ -- This is implicit - a table without an @Id@ or @Primary@ declaration will 
+ -- have a 'DefaultKey'. 
+ -- 
+ -- @since 2.13.0.0 
+ deriving (Show, Lift) 
+ 
+-- | Construct an entity definition.
+mkUnboundEntityDef + :: PersistSettings + -> ParsedEntityDef -- ^ parsed entity definition + -> UnboundEntityDef +mkUnboundEntityDef ps parsedEntDef = + UnboundEntityDef + { unboundForeignDefs = + foreigns + , unboundPrimarySpec = + case (idField, primaryComposite) of + (Just {}, Just {}) -> + error "Specified both an ID field and a Primary field" + (Just a, Nothing) -> + if unboundIdType a == Just (mkKeyConType (unboundIdEntityName a)) + then + DefaultKey (FieldNameDB $ psIdName ps) + else + SurrogateKey a + (Nothing, Just a) -> + NaturalKey a + (Nothing, Nothing) -> + DefaultKey (FieldNameDB $ psIdName ps) + , unboundEntityFields = + cols + , unboundEntityDef = + EntityDef + { entityHaskell = entNameHS + , entityDB = entNameDB + -- idField is the user-specified Id + -- otherwise useAutoIdField + -- but, adjust it if the user specified a Primary + , entityId = + EntityIdField $ + maybe autoIdField (unboundIdDefToFieldDef (defaultIdName ps) entNameHS) idField + , entityAttrs = + parsedEntityDefEntityAttributes parsedEntDef + , entityFields = + [] + , entityUniques = uniqs + , entityForeigns = [] + , entityDerives = concat $ mapMaybe takeDerives textAttribs + , entityExtra = parsedEntityDefExtras parsedEntDef + , entitySum = parsedEntityDefIsSum parsedEntDef + , entityComments = + case parsedEntityDefComments parsedEntDef of + [] -> Nothing + comments -> Just (T.unlines comments) + } + } + where + (entNameHS, entNameDB) = + entityNamesFromParsedDef ps parsedEntDef + + attribs = + parsedEntityDefFieldAttributes parsedEntDef + + textAttribs :: [[Text]] + textAttribs = + fmap tokenText <$> attribs + + (idField, primaryComposite, uniqs, foreigns) = + foldl' + (\(mid, mp, us, fs) attr -> + let + (i, p, u, f) = takeConstraint ps entNameHS cols attr + squish xs m = xs `mappend` maybeToList m + in + (just1 mid i, just1 mp p, squish us u, squish fs f) + ) + (Nothing, Nothing, [],[]) + textAttribs + + cols :: [UnboundFieldDef] + cols = reverse . fst . 
foldr k ([], []) $ reverse attribs + + k x (!acc, !comments) = + case listToMaybe x of + Just (DocComment comment) -> + (acc, comment : comments) + _ -> + case (setFieldComments comments <$> takeColsEx ps (tokenText <$> x)) of + Just sm -> + (sm : acc, []) + Nothing -> + (acc, []) + + autoIdField = + mkAutoIdField ps entNameHS idSqlType + + idSqlType = + maybe SqlInt64 (const $ SqlOther "Primary Key") primaryComposite + +defaultIdName :: PersistSettings -> FieldNameDB +defaultIdName = FieldNameDB . psIdName + +-- | Convert an 'UnboundIdDef' into a 'FieldDef' suitable for use in the +-- 'EntityIdField' constructor. +-- +-- @since 2.13.0.0 +unboundIdDefToFieldDef + :: FieldNameDB + -> EntityNameHS + -> UnboundIdDef + -> FieldDef +unboundIdDefToFieldDef dbField entNameHS uid = + FieldDef + { fieldHaskell = + FieldNameHS "Id" + , fieldDB = + getSqlNameOr dbField (unboundIdAttrs uid) + , fieldType = + fromMaybe (mkKeyConType entNameHS) $ unboundIdType uid + , fieldSqlType = + SqlOther "SqlType unset for Id" + , fieldStrict = + False + , fieldReference = + ForeignRef entNameHS + , fieldAttrs = + unboundIdAttrs uid + , fieldComments = + Nothing + , fieldCascade = unboundIdCascade uid + , fieldGenerated = Nothing + , fieldIsImplicitIdColumn = True + } + +-- | Convert an 'EntityNameHS' into 'FieldType' that will get parsed into the ID +-- type for the entity. +-- +-- @ +-- >>> mkKeyConType (EntityNameHS "Hello) +-- FTTypeCon Nothing "HelloId" +-- @ +-- +-- @since 2.13.0.0 +mkKeyConType :: EntityNameHS -> FieldType +mkKeyConType entNameHs = + FTTypeCon Nothing (keyConName entNameHs) + +-- | Assuming that the provided 'FieldDef' is an ID field, this converts it into +-- an 'UnboundIdDef'. 
+-- +-- @since 2.13.0.0 +unbindIdDef :: EntityNameHS -> FieldDef -> UnboundIdDef +unbindIdDef entityName fd = + UnboundIdDef + { unboundIdEntityName = + entityName + , unboundIdDBName = + fieldDB fd + , unboundIdAttrs = + fieldAttrs fd + , unboundIdCascade = + fieldCascade fd + , unboundIdType = + Just $ fieldType fd + } + +setFieldComments :: [Text] -> UnboundFieldDef -> UnboundFieldDef +setFieldComments xs fld = + case xs of + [] -> fld + _ -> fld { unboundFieldComments = Just (T.unlines xs) } + +just1 :: (Show x) => Maybe x -> Maybe x -> Maybe x +just1 (Just x) (Just y) = error $ "expected only one of: " + `mappend` show x `mappend` " " `mappend` show y +just1 x y = x `mplus` y + +mkAutoIdField :: PersistSettings -> EntityNameHS -> SqlType -> FieldDef +mkAutoIdField ps = + mkAutoIdField' (FieldNameDB $ psIdName ps) + +-- | Creates a default ID field. +-- +-- @since 2.13.0.0 +mkAutoIdField' :: FieldNameDB -> EntityNameHS -> SqlType -> FieldDef +mkAutoIdField' dbName entName idSqlType = + FieldDef + { fieldHaskell = FieldNameHS "Id" + , fieldDB = dbName + , fieldType = FTTypeCon Nothing $ keyConName entName + , fieldSqlType = idSqlType + , fieldReference = + NoReference + , fieldAttrs = [] + , fieldStrict = True + , fieldComments = Nothing + , fieldCascade = noCascade + , fieldGenerated = Nothing + , fieldIsImplicitIdColumn = True + } + +keyConName :: EntityNameHS -> Text +keyConName entName = unEntityNameHS entName `mappend` "Id" + +splitExtras + :: [Line] + -> ( [[Token]] + , M.Map Text [ExtraLine] + ) +splitExtras lns = + case lns of + [] -> ([], M.empty) + (line : rest) -> + case NEL.toList (tokens line) of + [Token name] + | isCapitalizedText name -> + let indent = lineIndent line + (children, rest') = span ((> indent) . lineIndent) rest + (x, y) = splitExtras rest' + in (x, M.insert name (NEL.toList . 
lineText <$> children) y) + ts -> + let (x, y) = splitExtras rest + in (ts:x, y) + +isCapitalizedText :: Text -> Bool +isCapitalizedText t = + not (T.null t) && isUpper (T.head t) + +takeColsEx :: PersistSettings -> [Text] -> Maybe UnboundFieldDef +takeColsEx = + takeCols + (\ft perr -> error $ "Invalid field type " ++ show ft ++ " " ++ perr) + +takeCols + :: (Text -> String -> Maybe UnboundFieldDef) + -> PersistSettings + -> [Text] + -> Maybe UnboundFieldDef +takeCols _ _ ("deriving":_) = Nothing +takeCols onErr ps (n':typ:rest') + | not (T.null n) && isLower (T.head n) = + case parseFieldType typ of + Left err -> onErr typ err + Right ft -> Just UnboundFieldDef + { unboundFieldNameHS = + FieldNameHS n + , unboundFieldNameDB = + getDbName' ps n fieldAttrs_ + , unboundFieldType = + ft + , unboundFieldAttrs = + fieldAttrs_ + , unboundFieldStrict = + fromMaybe (psStrictFields ps) mstrict + , unboundFieldComments = + Nothing + , unboundFieldCascade = + cascade_ + , unboundFieldGenerated = + generated_ + } + where + fieldAttrs_ = parseFieldAttrs attrs_ + generated_ = parseGenerated attrs_ + (cascade_, attrs_) = parseCascade rest' + (mstrict, n) + | Just x <- T.stripPrefix "!" n' = (Just True, x) + | Just x <- T.stripPrefix "~" n' = (Just False, x) + | otherwise = (Nothing, n') + +takeCols _ _ _ = Nothing + +parseGenerated :: [Text] -> Maybe Text +parseGenerated = foldl' (\acc x -> acc <|> T.stripPrefix "generated=" x) Nothing + +getDbName :: PersistSettings -> Text -> [Text] -> Text +getDbName ps n = + fromMaybe (psToDBName ps n) . listToMaybe . mapMaybe (T.stripPrefix "sql=") + +getDbName' :: PersistSettings -> Text -> [FieldAttr] -> FieldNameDB +getDbName' ps n = + getSqlNameOr (FieldNameDB $ psToDBName ps n) + +getSqlNameOr + :: FieldNameDB + -> [FieldAttr] + -> FieldNameDB +getSqlNameOr def = + maybe def FieldNameDB . findAttrSql + where + findAttrSql = + listToMaybe . 
mapMaybe isAttrSql + isAttrSql attr = + case attr of + FieldAttrSql t -> + Just t + _ -> + Nothing + +takeConstraint + :: PersistSettings + -> EntityNameHS + -> [UnboundFieldDef] + -> [Text] + -> (Maybe UnboundIdDef, Maybe UnboundCompositeDef, Maybe UniqueDef, Maybe UnboundForeignDef) +takeConstraint ps entityName defs (n:rest) | isCapitalizedText n = takeConstraint' + where + takeConstraint' + | n == "Unique" = + (Nothing, Nothing, takeUniq ps (unEntityNameHS entityName) defs rest, Nothing) + | n == "Foreign" = + (Nothing, Nothing, Nothing, Just $ takeForeign ps entityName rest) + | n == "Primary" = + (Nothing, Just $ takeComposite ps defNames rest, Nothing, Nothing) + | n == "Id" = + (Just $ takeId ps entityName rest, Nothing, Nothing, Nothing) + | otherwise = + (Nothing, Nothing, takeUniq ps "" defs (n:rest), Nothing) -- retain compatibility with original unique constraint + defNames = + map unboundFieldNameHS defs +takeConstraint _ _ _ _ = (Nothing, Nothing, Nothing, Nothing) + +-- | This type represents an @Id@ declaration in the QuasiQuoted syntax. +-- +-- > Id +-- +-- This uses the implied settings, and is equivalent to omitting the @Id@ +-- statement entirely. +-- +-- > Id Text +-- +-- This will set the field type of the ID to be 'Text'. +-- +-- > Id Text sql=foo_id +-- +-- This will set the field type of the Id to be 'Text' and the SQL DB name to be @foo_id@. +-- +-- > Id FooId +-- +-- This results in a shared primary key - the @FooId@ refers to a @Foo@ table. +-- +-- > Id FooId OnDelete Cascade +-- +-- You can set a cascade behavior on an ID column. +-- +-- @since 2.13.0.0 +data UnboundIdDef = UnboundIdDef + { unboundIdEntityName :: EntityNameHS + , unboundIdDBName :: !FieldNameDB + , unboundIdAttrs :: [FieldAttr] + , unboundIdCascade :: FieldCascade + , unboundIdType :: Maybe FieldType + } + deriving (Show, Lift) + +-- TODO: this is hacky (the double takeCols, the setFieldDef stuff, and setIdName. 
+-- need to re-work takeCols function 
+takeId :: PersistSettings -> EntityNameHS -> [Text] -> UnboundIdDef 
+takeId ps entityName texts = 
+ UnboundIdDef 
+ { unboundIdDBName = 
+ FieldNameDB $ psIdName ps 
+ , unboundIdEntityName = 
+ entityName 
+ , unboundIdCascade = 
+ cascade_ 
+ , unboundIdAttrs = 
+ parseFieldAttrs attrs_ 
+ , unboundIdType = 
+ typ 
+ } 
+ where 
+ typ = 
+ case texts of 
+ [] -> 
+ Nothing 
+ (t : _) -> 
+ case parseFieldType t of 
+ Left _ -> 
+ Nothing 
+ Right ft -> 
+ Just ft 
+ (cascade_, attrs_) = parseCascade texts 
+ 
+-- | A definition for a composite primary key. 
+-- 
+-- @since 2.13.0.0 
+data UnboundCompositeDef = UnboundCompositeDef 
+ { unboundCompositeCols :: [FieldNameHS] 
+ -- ^ The field names for the primary key. 
+ -- 
+ -- @since 2.13.0.0 
+ , unboundCompositeAttrs :: [Attr] 
+ -- ^ A list of attributes defined on the primary key. This is anything that 
+ -- occurs after a @!@ character. 
+ -- 
+ -- @since 2.13.0.0 
+ } 
+ deriving (Show, Lift) 
+ 
+takeComposite 
+ :: PersistSettings 
+ -> [FieldNameHS] 
+ -> [Text] 
+ -> UnboundCompositeDef 
+takeComposite ps fields pkcols = 
+ UnboundCompositeDef 
+ { unboundCompositeCols = 
+ map (getDef fields) cols 
+ , unboundCompositeAttrs = 
+ attrs 
+ } 
+ where 
+ (cols, attrs) = break ("!" `T.isPrefixOf`) pkcols 
+ getDef [] t = error $ "Unknown column in primary key constraint: " ++ show t 
+ getDef (d:ds) t 
+ | d == FieldNameHS t = 
+ -- TODO: check for nullability in later step 
+ -- if nullable (fieldAttrs d) /= NotNullable 
+ -- then error $ "primary key column cannot be nullable: " ++ show t ++ show fields 
+ d 
+ | otherwise = 
+ getDef ds t 
+ 
+-- Unique UppercaseConstraintName list of lowercase fields terminated 
+-- by ! or sql= such that a unique constraint can look like: 
+-- `UniqueTestNull fieldA fieldB sql=ConstraintNameInDatabase !force` 
+-- Here using sql= sets the name of the constraint.
+takeUniq + :: PersistSettings + -> Text + -> [UnboundFieldDef] + -> [Text] + -> Maybe UniqueDef +takeUniq ps tableName defs (n : rest) + | isCapitalizedText n = do + fields <- mfields + pure UniqueDef + { uniqueHaskell = + ConstraintNameHS n + , uniqueDBName = + dbName + , uniqueFields = + fmap (\a -> (FieldNameHS a, getDBName defs a)) fields + , uniqueAttrs = + attrs + } + where + isAttr a = + "!" `T.isPrefixOf` a + isSqlName a = + "sql=" `T.isPrefixOf` a + isNonField a = + isAttr a || isSqlName a + (fieldsList, nonFields) = + break isNonField rest + mfields = + NEL.nonEmpty fieldsList + + attrs = filter isAttr nonFields + + usualDbName = + ConstraintNameDB $ psToDBName ps (tableName `T.append` n) + sqlName :: Maybe ConstraintNameDB + sqlName = + case find isSqlName nonFields of + Nothing -> + Nothing + (Just t) -> + case drop 1 $ T.splitOn "=" t of + (x : _) -> Just (ConstraintNameDB x) + _ -> Nothing + dbName = fromMaybe usualDbName sqlName + + getDBName [] t = + error $ "Unknown column in unique constraint: " ++ show t + ++ " " ++ show defs ++ show n ++ " " ++ show attrs + getDBName (d:ds) t + | unboundFieldNameHS d == FieldNameHS t = + unboundFieldNameDB d + | otherwise = + getDBName ds t + +takeUniq _ tableName _ xs = + error $ "invalid unique constraint on table[" + ++ show tableName + ++ "] expecting an uppercase constraint name xs=" + ++ show xs + +-- | Define an explicit foreign key reference. +-- +-- @ +-- User +-- name Text +-- email Text +-- +-- Primary name email +-- +-- Dog +-- ownerName Text +-- ownerEmail Text +-- +-- Foreign User fk_dog_user ownerName ownerEmail +-- @ +-- +-- @since 2.13.0.0 +data UnboundForeignDef + = UnboundForeignDef + { unboundForeignFields :: UnboundForeignFieldList + -- ^ Fields in the source entity. + -- + -- @since 2.13.0.0 + , unboundForeignDef :: ForeignDef + -- ^ The 'ForeignDef' which needs information filled in. + -- + -- This value is unreliable. See the parsing code to see what data is filled + -- in here. 
+ -- + -- @since 2.13.0.0 + } + deriving (Eq, Show, Lift) + +-- | A list of fields present on the foreign reference. +data UnboundForeignFieldList + = FieldListImpliedId (NonEmpty FieldNameHS) + -- ^ If no @References@ keyword is supplied, then it is assumed that you are + -- referring to the @Primary@ key or @Id@ of the target entity. + -- + -- @since 2.13.0.0 + | FieldListHasReferences (NonEmpty ForeignFieldReference) + -- ^ You can specify the exact columns you're referring to here, if they + -- aren't part of a primary key. Most databases expect a unique index on the + -- columns you refer to, but Persistent doesnt' check that. + -- + -- @ + -- User + -- Id UUID default="uuid_generate_v1mc()" + -- name Text + -- + -- UniqueName name + -- + -- Dog + -- ownerName Text + -- + -- Foreign User fk_dog_user ownerName References name + -- @ + -- + -- @since 2.13.0.0 + deriving (Eq, Show, Lift) + +-- | A pairing of the 'FieldNameHS' for the source table to the 'FieldNameHS' +-- for the target table. +-- +-- @since 2.13.0.0 +data ForeignFieldReference = + ForeignFieldReference + { ffrSourceField :: FieldNameHS + -- ^ The column on the source table. + -- + -- @since 2.13.0.0 + , ffrTargetField :: FieldNameHS + -- ^ The column on the target table. + -- + -- @since 2.13.0.0 + } + deriving (Eq, Show, Lift) + +unbindForeignDef :: ForeignDef -> UnboundForeignDef +unbindForeignDef fd = + UnboundForeignDef + { unboundForeignFields = + FieldListHasReferences $ NEL.fromList $ fmap mk (foreignFields fd) + , unboundForeignDef = + fd + } + where + mk ((fH, _), (pH, _)) = + ForeignFieldReference + { ffrSourceField = fH + , ffrTargetField = pH + } + +mkUnboundForeignFieldList + :: [Text] + -> [Text] + -> Either String UnboundForeignFieldList +mkUnboundForeignFieldList (fmap FieldNameHS -> source) (fmap FieldNameHS -> target) = + case NEL.nonEmpty source of + Nothing -> + Left "No fields on foreign reference." 
+ Just sources -> + case NEL.nonEmpty target of + Nothing -> + Right $ FieldListImpliedId sources + Just targets -> + if length targets /= length sources + then + Left "Target and source length differe on foreign reference." + else + Right + $ FieldListHasReferences + $ NEL.zipWith ForeignFieldReference sources targets + +takeForeign + :: PersistSettings + -> EntityNameHS + -> [Text] + -> UnboundForeignDef +takeForeign ps entityName = takeRefTable + where + errorPrefix :: String + errorPrefix = "invalid foreign key constraint on table[" ++ show (unEntityNameHS entityName) ++ "] " + + takeRefTable :: [Text] -> UnboundForeignDef + takeRefTable [] = + error $ errorPrefix ++ " expecting foreign table name" + takeRefTable (refTableName:restLine) = + go restLine Nothing Nothing + where + go :: [Text] -> Maybe CascadeAction -> Maybe CascadeAction -> UnboundForeignDef + go (constraintNameText:rest) onDelete onUpdate + | not (T.null constraintNameText) && isLower (T.head constraintNameText) = + UnboundForeignDef + { unboundForeignFields = + either error id $ mkUnboundForeignFieldList foreignFields parentFields + , unboundForeignDef = + ForeignDef + { foreignRefTableHaskell = + EntityNameHS refTableName + , foreignRefTableDBName = + EntityNameDB $ psToDBName ps refTableName + , foreignConstraintNameHaskell = + constraintName + , foreignConstraintNameDBName = + toFKConstraintNameDB ps entityName constraintName + , foreignFieldCascade = + FieldCascade + { fcOnDelete = onDelete + , fcOnUpdate = onUpdate + } + , foreignAttrs = + attrs + , foreignFields = + [] + , foreignNullable = + False + , foreignToPrimary = + null parentFields + } + } + where + constraintName = + ConstraintNameHS constraintNameText + + (fields, attrs) = + break ("!" 
`T.isPrefixOf`) rest + (foreignFields, parentFields) = + case break (== "References") fields of + (ffs, []) -> + (ffs, []) + (ffs, _ : pfs) -> + case (length ffs, length pfs) of + (flen, plen) + | flen == plen -> + (ffs, pfs) + (flen, plen) -> + error $ errorPrefix ++ concat + [ "Found " , show flen + , " foreign fields but " + , show plen, " parent fields" + ] + + go ((parseCascadeAction CascadeDelete -> Just cascadingAction) : rest) onDelete' onUpdate = + case onDelete' of + Nothing -> + go rest (Just cascadingAction) onUpdate + Just _ -> + error $ errorPrefix ++ "found more than one OnDelete actions" + + go ((parseCascadeAction CascadeUpdate -> Just cascadingAction) : rest) onDelete onUpdate' = + case onUpdate' of + Nothing -> + go rest onDelete (Just cascadingAction) + Just _ -> + error $ errorPrefix ++ "found more than one OnUpdate actions" + + go xs _ _ = error $ errorPrefix ++ "expecting a lower case constraint name or a cascading action xs=" ++ show xs + +toFKConstraintNameDB :: PersistSettings -> EntityNameHS -> ConstraintNameHS -> ConstraintNameDB +toFKConstraintNameDB ps entityName constraintName = + ConstraintNameDB $ psToDBName ps (psToFKName ps entityName constraintName) + +data CascadePrefix = CascadeUpdate | CascadeDelete + +parseCascade :: [Text] -> (FieldCascade, [Text]) +parseCascade allTokens = + go [] Nothing Nothing allTokens + where + go acc mupd mdel tokens_ = + case tokens_ of + [] -> + ( FieldCascade + { fcOnDelete = mdel + , fcOnUpdate = mupd + } + , acc + ) + this : rest -> + case parseCascadeAction CascadeUpdate this of + Just cascUpd -> + case mupd of + Nothing -> + go acc (Just cascUpd) mdel rest + Just _ -> + nope "found more than one OnUpdate action" + Nothing -> + case parseCascadeAction CascadeDelete this of + Just cascDel -> + case mdel of + Nothing -> + go acc mupd (Just cascDel) rest + Just _ -> + nope "found more than one OnDelete action: " + Nothing -> + go (this : acc) mupd mdel rest + nope msg = + error $ msg <> ", tokens: 
" <> show allTokens + +parseCascadeAction + :: CascadePrefix + -> Text + -> Maybe CascadeAction +parseCascadeAction prfx text = do + cascadeStr <- T.stripPrefix ("On" <> toPrefix prfx) text + case readEither (T.unpack cascadeStr) of + Right a -> + Just a + Left _ -> + Nothing + where + toPrefix cp = + case cp of + CascadeUpdate -> "Update" + CascadeDelete -> "Delete" + +takeDerives :: [Text] -> Maybe [Text] +takeDerives ("deriving":rest) = Just rest +takeDerives _ = Nothing + +nullable :: [FieldAttr] -> IsNullable +nullable s + | FieldAttrMaybe `elem` s = Nullable ByMaybeAttr + | FieldAttrNullable `elem` s = Nullable ByNullableAttr + | otherwise = NotNullable + + +-- | Returns 'True' if the 'UnboundFieldDef' does not have a 'MigrationOnly' or +-- 'SafeToRemove' flag from the QuasiQuoter. +-- +-- @since 2.13.0.0 +isHaskellUnboundField :: UnboundFieldDef -> Bool +isHaskellUnboundField fd = + FieldAttrMigrationOnly `notElem` unboundFieldAttrs fd && + FieldAttrSafeToRemove `notElem` unboundFieldAttrs fd + +-- | Return the 'EntityNameHS' for an 'UnboundEntityDef'. +-- +-- @since 2.13.0.0 +getUnboundEntityNameHS :: UnboundEntityDef -> EntityNameHS +getUnboundEntityNameHS = entityHaskell . unboundEntityDef diff --git a/persistent/Database/Persist/Sql.hs b/persistent/Database/Persist/Sql.hs index 5bb716e98..ff6751e42 100644 --- a/persistent/Database/Persist/Sql.hs +++ b/persistent/Database/Persist/Sql.hs @@ -19,10 +19,15 @@ module Database.Persist.Sql , transactionSaveWithIsolation , transactionUndo , transactionUndoWithIsolation - , IsolationLevel (..) , getStmtConn + , mkColumns + , BackendSpecificOverrides + , emptyBackendSpecificOverrides + , getBackendSpecificForeignKeyName + , setBackendSpecificForeignKeyName + , defaultAttribute -- * Internal - , module Database.Persist.Sql.Internal + , IsolationLevel(..) 
, decorateSQLWithLimitOffset ) where @@ -36,7 +41,7 @@ import Database.Persist.Sql.Migration import Database.Persist.Sql.Raw import Database.Persist.Sql.Run hiding (rawAcquireSqlConn, rawRunSqlPool) import Database.Persist.Sql.Types -import Database.Persist.Sql.Types.Internal (IsolationLevel(..)) +import Database.Persist.Sql.Types.Internal (IsolationLevel(..), SqlBackend(..)) import Database.Persist.Sql.Orphan.PersistQuery import Database.Persist.Sql.Orphan.PersistStore diff --git a/persistent/Database/Persist/Sql/Class.hs b/persistent/Database/Persist/Sql/Class.hs index 9a4aa9a71..bef70fe10 100644 --- a/persistent/Database/Persist/Sql/Class.hs +++ b/persistent/Database/Persist/Sql/Class.hs @@ -1,10 +1,11 @@ {-# LANGUAGE CPP #-} -{-# LANGUAGE TypeOperators, FlexibleInstances #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE DataKinds #-} module Database.Persist.Sql.Class ( RawSql (..) 
@@ -13,10 +14,10 @@ module Database.Persist.Sql.Class , unPrefix ) where -import GHC.TypeLits (Symbol, KnownSymbol, symbolVal) import Data.Bits (bitSizeMaybe) import Data.ByteString (ByteString) import Data.Fixed +import Data.Foldable (toList) import Data.Int import qualified Data.IntMap as IM import qualified Data.Map as M @@ -27,9 +28,10 @@ import qualified Data.Set as S import Data.Text (Text, intercalate, pack) import qualified Data.Text as T import qualified Data.Text.Lazy as TL -import Data.Time (UTCTime, TimeOfDay, Day) +import Data.Time (Day, TimeOfDay, UTCTime) import qualified Data.Vector as V import Data.Word +import GHC.TypeLits (KnownSymbol, Symbol, symbolVal) import Text.Blaze.Html (Html) import Database.Persist @@ -66,27 +68,42 @@ instance rawSqlProcessRow = keyFromValues instance - (PersistEntity record, PersistEntityBackend record ~ backend, IsPersistBackend backend) => - RawSql (Entity record) where - rawSqlCols escape _ent = (length sqlFields, [intercalate ", " sqlFields]) - where - sqlFields = map (((name <> ".") <>) . escapeWith escape) - $ map fieldDB - -- Hacky for a composite key because - -- it selects the same field multiple times - $ entityKeyFields entDef ++ entityFields entDef - name = escapeWith escape (entityDB entDef) - entDef = entityDef (Nothing :: Maybe record) + (PersistEntity record, PersistEntityBackend record ~ backend, IsPersistBackend backend) + => + RawSql (Entity record) + where + rawSqlCols escape _ent = (length sqlFields, [intercalate ", " $ toList sqlFields]) + where + sqlFields = + fmap (((name <> ".") <>) . 
escapeWith escape) + $ fmap fieldDB + $ keyAndEntityFields entDef + name = + escapeWith escape (getEntityDBName entDef) + entDef = + entityDef (Nothing :: Maybe record) rawSqlColCountReason a = case fst (rawSqlCols (error "RawSql") a) of 1 -> "one column for an 'Entity' data type without fields" - n -> show n ++ " columns for an 'Entity' data type" - rawSqlProcessRow row = case splitAt nKeyFields row of - (rowKey, rowVal) -> Entity <$> keyFromValues rowKey - <*> fromPersistValues rowVal - where - nKeyFields = length $ entityKeyFields entDef - entDef = entityDef (Nothing :: Maybe record) + n -> show n <> " columns for an 'Entity' data type" + rawSqlProcessRow row = + case keyFromRecordM of + Just mkKey -> do + val <- fromPersistValues row + pure Entity + { entityKey = + mkKey val + , entityVal = + val + } + Nothing -> + case row of + (k : rest) -> + Entity + <$> keyFromValues [k] + <*> fromPersistValues rest + [] -> + Left "Row was empty" -- | This newtype wrapper is useful when selecting an entity out of the -- database and you want to provide a prefix to the table being selected. @@ -134,7 +151,7 @@ newtype EntityWithPrefix (prefix :: Symbol) record -- -- @ -- myQuery :: 'SqlPersistM' ['Entity' Person] --- myQuery = map (unPrefix @\"p\") <$> rawSql query [] +-- myQuery = fmap (unPrefix @\"p\") <$> rawSql query [] -- where -- query = "SELECT ?? FROM person AS p" -- @ @@ -149,25 +166,34 @@ instance , PersistEntityBackend record ~ backend , IsPersistBackend backend ) - => RawSql (EntityWithPrefix prefix record) where - rawSqlCols escape _ent = (length sqlFields, [intercalate ", " sqlFields]) - where - sqlFields = map (((name <> ".") <>) . escapeWith escape) - $ map fieldDB + => + RawSql (EntityWithPrefix prefix record) + where + rawSqlCols escape _ent = (length sqlFields, [intercalate ", " $ toList sqlFields]) + where + sqlFields = + fmap (((name <> ".") <>) . 
escapeWith escape) + $ fmap fieldDB -- Hacky for a composite key because -- it selects the same field multiple times - $ entityKeyFields entDef ++ entityFields entDef - name = pack $ symbolVal (Proxy :: Proxy prefix) - entDef = entityDef (Nothing :: Maybe record) + $ keyAndEntityFields entDef + name = + pack $ symbolVal (Proxy :: Proxy prefix) + entDef = + entityDef (Nothing :: Maybe record) rawSqlColCountReason a = case fst (rawSqlCols (error "RawSql") a) of - 1 -> "one column for an 'Entity' data type without fields" - n -> show n ++ " columns for an 'Entity' data type" - rawSqlProcessRow row = case splitAt nKeyFields row of - (rowKey, rowVal) -> fmap EntityWithPrefix $ Entity <$> keyFromValues rowKey - <*> fromPersistValues rowVal + 1 -> "one column for an 'Entity' data type without fields" + n -> show n ++ " columns for an 'Entity' data type" + rawSqlProcessRow row = + case splitAt nKeyFields row of + (rowKey, rowVal) -> + fmap EntityWithPrefix $ + Entity + <$> keyFromValues rowKey + <*> fromPersistValues rowVal where - nKeyFields = length $ entityKeyFields entDef + nKeyFields = length $ getEntityKeyFields entDef entDef = entityDef (Nothing :: Maybe record) -- | @since 1.0.1 diff --git a/persistent/Database/Persist/Sql/Internal.hs b/persistent/Database/Persist/Sql/Internal.hs index d33dbfd6e..e44b84c29 100644 --- a/persistent/Database/Persist/Sql/Internal.hs +++ b/persistent/Database/Persist/Sql/Internal.hs @@ -8,6 +8,8 @@ module Database.Persist.Sql.Internal ( mkColumns , defaultAttribute , BackendSpecificOverrides(..) 
+ , getBackendSpecificForeignKeyName + , setBackendSpecificForeignKeyName , emptyBackendSpecificOverrides ) where @@ -16,20 +18,49 @@ import Data.Monoid (mappend, mconcat) import Data.Text (Text) import qualified Data.Text as T +import Data.Maybe (fromMaybe, listToMaybe, mapMaybe) +import Database.Persist.EntityDef import Database.Persist.Quasi import Database.Persist.Sql.Types import Database.Persist.Types -import Data.Maybe (fromMaybe, listToMaybe, mapMaybe) --- | Record of functions to override the default behavior in 'mkColumns'. --- It is recommended you initialize this with 'emptyBackendSpecificOverrides' and override the default values, --- so that as new fields are added, your code still compiles. +-- | Record of functions to override the default behavior in 'mkColumns'. It is +-- recommended you initialize this with 'emptyBackendSpecificOverrides' and +-- override the default values, so that as new fields are added, your code still +-- compiles. +-- +-- For added safety, use the @getBackendSpecific*@ and @setBackendSpecific*@ +-- functions, as a breaking change to the record field labels won't be reflected +-- in a major version bump of the library. -- -- @since 2.11 data BackendSpecificOverrides = BackendSpecificOverrides { backendSpecificForeignKeyName :: Maybe (EntityNameDB -> FieldNameDB -> ConstraintNameDB) } +-- | If the override is defined, then this returns a function that accepts an +-- entity name and field name and provides the 'ConstraintNameDB' for the +-- foreign key constraint. +-- +-- An abstract accessor for the 'BackendSpecificOverrides' +-- +-- @since 2.13.0.0 +getBackendSpecificForeignKeyName + :: BackendSpecificOverrides + -> Maybe (EntityNameDB -> FieldNameDB -> ConstraintNameDB) +getBackendSpecificForeignKeyName = + backendSpecificForeignKeyName + +-- | Set the backend's foreign key generation function to this value. 
+-- +-- @since 2.13.0.0 +setBackendSpecificForeignKeyName + :: (EntityNameDB -> FieldNameDB -> ConstraintNameDB) + -> BackendSpecificOverrides + -> BackendSpecificOverrides +setBackendSpecificForeignKeyName func bso = + bso { backendSpecificForeignKeyName = Just func } + findMaybe :: (a -> Maybe b) -> [a] -> Maybe b findMaybe p = listToMaybe . mapMaybe p @@ -51,15 +82,18 @@ mkColumns -> BackendSpecificOverrides -> ([Column], [UniqueDef], [ForeignDef]) mkColumns allDefs t overrides = - (cols, entityUniques t, entityForeigns t) + (cols, getEntityUniques t, getEntityForeignDefs t) where cols :: [Column] - cols = map goId idCol `mappend` map go (entityFields t) + cols = map goId idCol `mappend` map go (getEntityFieldsDatabase t) idCol :: [FieldDef] - idCol = case entityPrimary t of - Just _ -> [] - Nothing -> [entityId t] + idCol = + case getEntityId t of + EntityIdNaturalKey _ -> + [] + EntityIdField fd -> + [fd] goId :: FieldDef -> Column goId fd = @@ -100,14 +134,13 @@ mkColumns allDefs t overrides = } tableName :: EntityNameDB - tableName = entityDB t - + tableName = getEntityDBName t go :: FieldDef -> Column go fd = Column { cName = fieldDB fd - , cNull = nullable (fieldAttrs fd) /= NotNullable || entitySum t + , cNull = nullable (fieldAttrs fd) /= NotNullable || isEntitySum t , cSqlType = fieldSqlType fd , cDefault = defaultAttribute $ fieldAttrs fd , cGenerated = fieldGenerated fd @@ -145,7 +178,7 @@ mkColumns allDefs t overrides = -> [FieldAttr] -> Maybe (EntityNameDB, ConstraintNameDB) -- table name, constraint name ref c fe [] - | ForeignRef f _ <- fe = + | ForeignRef f <- fe = Just (resolveTableName allDefs f, refNameFn tableName c) | otherwise = Nothing ref _ _ (FieldAttrNoreference:_) = Nothing @@ -165,5 +198,5 @@ refName (EntityNameDB table) (FieldNameDB column) = resolveTableName :: [EntityDef] -> EntityNameHS -> EntityNameDB resolveTableName [] (EntityNameHS t) = error $ "Table not found: " `Data.Monoid.mappend` T.unpack t resolveTableName (e:es) hn - | 
entityHaskell e == hn = entityDB e + | getEntityHaskellName e == hn = getEntityDBName e | otherwise = resolveTableName es hn diff --git a/persistent/Database/Persist/Sql/Migration.hs b/persistent/Database/Persist/Sql/Migration.hs index f4846f309..e431253c3 100644 --- a/persistent/Database/Persist/Sql/Migration.hs +++ b/persistent/Database/Persist/Sql/Migration.hs @@ -15,6 +15,7 @@ module Database.Persist.Sql.Migration , reportError , addMigrations , addMigration + , runSqlCommand ) where @@ -31,6 +32,7 @@ import System.IO.Silently (hSilence) import GHC.Stack import Database.Persist.Sql.Types +import Database.Persist.Sql.Types.Internal import Database.Persist.Sql.Raw import Database.Persist.Types import Database.Persist.Sql.Orphan.PersistStore() @@ -208,3 +210,14 @@ addMigrations :: CautiousMigration -> Migration addMigrations = lift . tell + +-- | Run an action against the database during a migration. Can be useful for eg +-- creating Postgres extensions: +-- +-- @ +-- runSqlCommand $ 'rawExecute' "CREATE EXTENSION IF NOT EXISTS \"uuid-ossp\";" [] +-- @ +-- +-- @since 2.13.0.0 +runSqlCommand :: SqlPersistT IO () -> Migration +runSqlCommand = lift . 
lift diff --git a/persistent/Database/Persist/Sql/Orphan/PersistQuery.hs b/persistent/Database/Persist/Sql/Orphan/PersistQuery.hs index 2308ef2ae..c81f75e62 100644 --- a/persistent/Database/Persist/Sql/Orphan/PersistQuery.hs +++ b/persistent/Database/Persist/Sql/Orphan/PersistQuery.hs @@ -3,6 +3,7 @@ {-# OPTIONS_GHC -fno-warn-orphans #-} +-- | TODO: delete this module and get it in with SqlBackend.Internal module Database.Persist.Sql.Orphan.PersistQuery ( deleteWhereCount , updateWhereCount @@ -20,19 +21,27 @@ import Data.ByteString.Char8 (readInteger) import Data.Conduit import qualified Data.Conduit.List as CL import Data.Int (Int64) -import Data.List (transpose, inits, find) +import Data.List (find, inits, transpose) import Data.Maybe (isJust) -import Data.Monoid (Monoid (..), (<>)) -import qualified Data.Text as T +import Data.Monoid (Monoid(..), (<>)) import Data.Text (Text) +import qualified Data.Text as T +import Data.Foldable (toList) import Database.Persist hiding (updateField) -import Database.Persist.Sql.Util ( - entityColumnNames, parseEntityValues, isIdField, updatePersistValue - , mkUpdateText, commaSeparated, dbIdColumns) -import Database.Persist.Sql.Types -import Database.Persist.Sql.Raw import Database.Persist.Sql.Orphan.PersistStore (withRawQuery) +import Database.Persist.Sql.Raw +import Database.Persist.Sql.Types.Internal + (SqlBackend(..), SqlReadBackend, SqlWriteBackend) +import Database.Persist.Sql.Util + ( commaSeparated + , dbIdColumns + , keyAndEntityColumnNames + , isIdField + , mkUpdateText + , parseEntityValues + , updatePersistValue + ) -- orphaned instance for convenience of modularity instance PersistQueryRead SqlBackend where @@ -91,9 +100,13 @@ instance PersistQueryRead SqlBackend where where (limit, offset, orders) = limitOffsetOrder opts - parse vals = case parseEntityValues t vals of - Left s -> liftIO $ throwIO $ PersistMarshalError s - Right row -> return row + parse vals = + case parseEntityValues t vals of + Left s -> + 
liftIO $ throwIO $ + PersistMarshalError ("selectSourceRes: " <> s <> ", vals: " <> T.pack (show vals )) + Right row -> + return row t = entityDef $ dummyFromFilts filts wher conn = if null filts then "" @@ -102,8 +115,8 @@ instance PersistQueryRead SqlBackend where case map (orderClause False conn) orders of [] -> "" ords -> " ORDER BY " <> T.intercalate "," ords - cols = commaSeparated . entityColumnNames t - sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat + cols = commaSeparated . toList . keyAndEntityColumnNames t + sql conn = connLimitOffset conn (limit,offset) $ mconcat [ "SELECT " , cols conn , " FROM " @@ -118,13 +131,13 @@ instance PersistQueryRead SqlBackend where return $ fmap (.| CL.mapM parse) srcRes where t = entityDef $ dummyFromFilts filts - cols conn = T.intercalate "," $ dbIdColumns conn t + cols conn = T.intercalate "," $ toList $ dbIdColumns conn t wher conn = if null filts then "" else filterClause Nothing conn filts - sql conn = connLimitOffset conn (limit,offset) (not (null orders)) $ mconcat + sql conn = connLimitOffset conn (limit,offset) $ mconcat [ "SELECT " , cols conn , " FROM " @@ -148,8 +161,8 @@ instance PersistQueryRead SqlBackend where [PersistDouble x] -> return [PersistInt64 (truncate x)] -- oracle returns Double _ -> return xs Just pdef -> - let pks = map fieldHaskell $ compositeFields pdef - keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) xs + let pks = map fieldHaskell $ toList $ compositeFields pdef + keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ getEntityFields t) xs in return keyvals case keyFromValues keyvals of Right k -> return k @@ -249,13 +262,14 @@ data FilterTablePrefix -- -- @since 2.12.1.0 -filterClauseHelper :: (PersistEntity val) - => Maybe FilterTablePrefix -- ^ include table name or PostgresSQL EXCLUDED - -> Bool -- ^ include WHERE - -> SqlBackend - 
-> OrNull - -> [Filter val] - -> (Text, [PersistValue]) +filterClauseHelper + :: (PersistEntity val) + => Maybe FilterTablePrefix -- ^ include table name or PostgresSQL EXCLUDED + -> Bool -- ^ include WHERE + -> SqlBackend + -> OrNull + -> [Filter val] + -> (Text, [PersistValue]) filterClauseHelper tablePrefix includeWhere conn orNull filters = (if not (T.null sql) && includeWhere then " WHERE " <> sql @@ -277,85 +291,96 @@ filterClauseHelper tablePrefix includeWhere conn orNull filters = go (FilterOr fs) = combine " OR " fs go (Filter field value pfilter) = let t = entityDef $ dummyFromFilts [Filter field value pfilter] - in case (isIdField field, entityPrimary t, allVals) of - (True, Just pdef, PersistList ys:_) -> - if length (compositeFields pdef) /= length ys - then error $ "wrong number of entries in compositeFields vs PersistList allVals=" ++ show allVals + in + case (isIdField field, entityPrimary t, allVals) of + (True, Just pdef, PersistList ys:_) -> + let cfields = toList $ compositeFields pdef in + if length cfields /= length ys + then error $ "wrong number of entries in compositeFields vs PersistList allVals=" ++ show allVals else - case (allVals, pfilter, isCompFilter pfilter) of - ([PersistList xs], Eq, _) -> - let sqlcl=T.intercalate " and " (map (\a -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") (compositeFields pdef)) - in (wrapSql sqlcl,xs) - ([PersistList xs], Ne, _) -> - let sqlcl=T.intercalate " or " (map (\a -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "? 
") (compositeFields pdef)) - in (wrapSql sqlcl,xs) - (_, In, _) -> - let xxs = transpose (map fromPersistList allVals) - sqls=map (\(a,xs) -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs) - in (wrapSql (T.intercalate " and " (map wrapSql sqls)), concat xxs) - (_, NotIn, _) -> - let xxs = transpose (map fromPersistList allVals) - sqls=map (\(a,xs) -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip (compositeFields pdef) xxs) - in (wrapSql (T.intercalate " or " (map wrapSql sqls)), concat xxs) - ([PersistList xs], _, True) -> - let zs = tail (inits (compositeFields pdef)) - sql1 = map (\b -> wrapSql (T.intercalate " and " (map (\(i,a) -> sql2 (i==length b) a) (zip [1..] b)))) zs - sql2 islast a = connEscapeFieldName conn (fieldDB a) <> (if islast then showSqlFilter pfilter else showSqlFilter Eq) <> "? 
" - sqlcl = T.intercalate " or " sql1 - in (wrapSql sqlcl, concat (tail (inits xs))) - (_, BackendSpecificFilter _, _) -> error "unhandled type BackendSpecificFilter for composite/non id primary keys" - _ -> error $ "unhandled type/filter for composite/non id primary keys pfilter=" ++ show pfilter ++ " persistList="++show allVals - (True, Just pdef, []) -> - error $ "empty list given as filter value filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef - (True, Just pdef, _) -> - error $ "unhandled error for composite/non id primary keys filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef - - _ -> case (isNull, pfilter, length notNullVals) of - (True, Eq, _) -> (name <> " IS NULL", []) - (True, Ne, _) -> (name <> " IS NOT NULL", []) - (False, Ne, _) -> (T.concat - [ "(" - , name - , " IS NULL OR " - , name - , " <> " - , qmarks - , ")" - ], notNullVals) - -- We use 1=2 (and below 1=1) to avoid using TRUE and FALSE, since - -- not all databases support those words directly. - (_, In, 0) -> ("1=2" <> orNullSuffix, []) - (False, In, _) -> (name <> " IN " <> qmarks <> orNullSuffix, allVals) - (True, In, _) -> (T.concat - [ "(" - , name - , " IS NULL OR " - , name - , " IN " - , qmarks - , ")" - ], notNullVals) - (False, NotIn, 0) -> ("1=1", []) - (True, NotIn, 0) -> (name <> " IS NOT NULL", []) - (False, NotIn, _) -> (T.concat - [ "(" - , name - , " IS NULL OR " - , name - , " NOT IN " - , qmarks - , ")" - ], notNullVals) - (True, NotIn, _) -> (T.concat - [ "(" - , name - , " IS NOT NULL AND " - , name - , " NOT IN " - , qmarks - , ")" - ], notNullVals) - _ -> (name <> showSqlFilter pfilter <> "?" <> orNullSuffix, allVals) + case (allVals, pfilter, isCompFilter pfilter) of + ([PersistList xs], Eq, _) -> + let + sqlcl = + T.intercalate " and " + (map (\a -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "? 
") cfields) + in + (wrapSql sqlcl, xs) + ([PersistList xs], Ne, _) -> + let + sqlcl = + T.intercalate " or " (map (\a -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "? ") cfields) + in + (wrapSql sqlcl, xs) + (_, In, _) -> + let xxs = transpose (map fromPersistList allVals) + sqls=map (\(a,xs) -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip cfields xxs) + in (wrapSql (T.intercalate " and " (map wrapSql sqls)), concat xxs) + (_, NotIn, _) -> + let + xxs = transpose (map fromPersistList allVals) + sqls = map (\(a,xs) -> connEscapeFieldName conn (fieldDB a) <> showSqlFilter pfilter <> "(" <> T.intercalate "," (replicate (length xs) " ?") <> ") ") (zip cfields xxs) + in + (wrapSql (T.intercalate " or " (map wrapSql sqls)), concat xxs) + ([PersistList xs], _, True) -> + let zs = tail (inits (toList $ compositeFields pdef)) + sql1 = map (\b -> wrapSql (T.intercalate " and " (map (\(i,a) -> sql2 (i==length b) a) (zip [1..] b)))) zs + sql2 islast a = connEscapeFieldName conn (fieldDB a) <> (if islast then showSqlFilter pfilter else showSqlFilter Eq) <> "? 
" + sqlcl = T.intercalate " or " sql1 + in (wrapSql sqlcl, concat (tail (inits xs))) + (_, BackendSpecificFilter _, _) -> error "unhandled type BackendSpecificFilter for composite/non id primary keys" + _ -> error $ "unhandled type/filter for composite/non id primary keys pfilter=" ++ show pfilter ++ " persistList="++show allVals + (True, Just pdef, []) -> + error $ "empty list given as filter value filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef + (True, Just pdef, _) -> + error $ "unhandled error for composite/non id primary keys filter=" ++ show pfilter ++ " persistList=" ++ show allVals ++ " pdef=" ++ show pdef + + _ -> case (isNull, pfilter, length notNullVals) of + (True, Eq, _) -> (name <> " IS NULL", []) + (True, Ne, _) -> (name <> " IS NOT NULL", []) + (False, Ne, _) -> (T.concat + [ "(" + , name + , " IS NULL OR " + , name + , " <> " + , qmarks + , ")" + ], notNullVals) + -- We use 1=2 (and below 1=1) to avoid using TRUE and FALSE, since + -- not all databases support those words directly. + (_, In, 0) -> ("1=2" <> orNullSuffix, []) + (False, In, _) -> (name <> " IN " <> qmarks <> orNullSuffix, allVals) + (True, In, _) -> (T.concat + [ "(" + , name + , " IS NULL OR " + , name + , " IN " + , qmarks + , ")" + ], notNullVals) + (False, NotIn, 0) -> ("1=1", []) + (True, NotIn, 0) -> (name <> " IS NOT NULL", []) + (False, NotIn, _) -> (T.concat + [ "(" + , name + , " IS NULL OR " + , name + , " NOT IN " + , qmarks + , ")" + ], notNullVals) + (True, NotIn, _) -> (T.concat + [ "(" + , name + , " IS NOT NULL AND " + , name + , " NOT IN " + , qmarks + , ")" + ], notNullVals) + _ -> (name <> showSqlFilter pfilter <> "?" <> orNullSuffix, allVals) where isCompFilter Lt = True @@ -457,8 +482,12 @@ orderClause includeTable conn o = $ connEscapeFieldName conn (fieldName x) -- | Generates sql for limit and offset for postgres, sqlite and mysql. 
-decorateSQLWithLimitOffset::Text -> (Int,Int) -> Bool -> Text -> Text -decorateSQLWithLimitOffset nolimit (limit,offset) _ sql = +decorateSQLWithLimitOffset + :: Text + -> (Int,Int) + -> Text + -> Text +decorateSQLWithLimitOffset nolimit (limit,offset) sql = let lim = case (limit, offset) of (0, 0) -> "" diff --git a/persistent/Database/Persist/Sql/Orphan/PersistStore.hs b/persistent/Database/Persist/Sql/Orphan/PersistStore.hs index e3420c255..1683c6a27 100644 --- a/persistent/Database/Persist/Sql/Orphan/PersistStore.hs +++ b/persistent/Database/Persist/Sql/Orphan/PersistStore.hs @@ -1,29 +1,28 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE DeriveGeneric #-} {-# OPTIONS_GHC -fno-warn-orphans #-} module Database.Persist.Sql.Orphan.PersistStore - ( withRawQuery - , BackendKey(..) - , toSqlKey - , fromSqlKey - , getFieldName - , getTableName - , tableDBName - , fieldDBName - ) where + ( withRawQuery + , BackendKey(..) 
+ , toSqlKey + , fromSqlKey + , getFieldName + , getTableName + , tableDBName + , fieldDBName + ) where -import GHC.Generics (Generic) import Control.Exception (throwIO) import Control.Monad.IO.Class -import Control.Monad.Trans.Reader (ReaderT, ask, withReaderT) +import Control.Monad.Trans.Reader (ReaderT, ask) import Data.Acquire (with) import qualified Data.Aeson as A import Data.ByteString.Char8 (readInteger) -import Data.Conduit (ConduitM, (.|), runConduit) +import Data.Conduit (ConduitM, runConduit, (.|)) import qualified Data.Conduit.List as CL import qualified Data.Foldable as Foldable import Data.Function (on) @@ -35,17 +34,25 @@ import Data.Monoid (mappend, (<>)) import Data.Text (Text, unpack) import qualified Data.Text as T import Data.Void (Void) +import GHC.Generics (Generic) +import Web.HttpApiData (FromHttpApiData, ToHttpApiData) import Web.PathPieces (PathPiece) -import Web.HttpApiData (ToHttpApiData, FromHttpApiData) import Database.Persist import Database.Persist.Class () import Database.Persist.Sql.Class (PersistFieldSql) import Database.Persist.Sql.Raw import Database.Persist.Sql.Types -import Database.Persist.Sql.Util ( - dbIdColumns, keyAndEntityColumnNames, parseEntityValues, entityColumnNames - , updatePersistValue, mkUpdateText, commaSeparated, mkInsertValues) +import Database.Persist.Sql.Types.Internal +import Database.Persist.Sql.Util + ( commaSeparated + , dbIdColumns + , keyAndEntityColumnNames + , mkInsertValues + , mkUpdateText + , parseEntityValues + , updatePersistValue + ) withRawQuery :: MonadIO m => Text @@ -65,7 +72,8 @@ fromSqlKey = unSqlBackendKey . toBackendKey whereStmtForKey :: PersistEntity record => SqlBackend -> Key record -> Text whereStmtForKey conn k = T.intercalate " AND " - $ map (<> "=? ") + $ Foldable.toList + $ fmap (<> "=? 
") $ dbIdColumns conn entDef where entDef = entityDef $ dummyFromKey k @@ -89,7 +97,7 @@ getTableName rec = withCompatibleBackend $ do -- | useful for a backend to implement tableName by adding escaping tableDBName :: (PersistEntity record) => record -> EntityNameDB -tableDBName rec = entityDB $ entityDef (Just rec) +tableDBName rec = getEntityDBName $ entityDef (Just rec) -- | get the SQL string for the field that an EntityField represents -- Useful for raw SQL queries @@ -194,10 +202,11 @@ instance PersistStoreWrite SqlBackend where ISRManyKeys sql fs -> do rawExecute sql vals case entityPrimary t of - Nothing -> error $ "ISRManyKeys is used when Primary is defined " ++ show sql + Nothing -> + error $ "ISRManyKeys is used when Primary is defined " ++ show sql Just pdef -> - let pks = map fieldHaskell $ compositeFields pdef - keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ entityFields t) fs + let pks = Foldable.toList $ fmap fieldHaskell $ compositeFields pdef + keyvals = map snd $ filter (\(a, _) -> let ret=isJust (find (== a) pks) in ret) $ zip (map fieldHaskell $ getEntityFields t) fs in case keyFromValues keyvals of Right k -> return k Left e -> error $ "ISRManyKeys: unexpected keyvals result: " `mappend` unpack e @@ -224,7 +233,7 @@ instance PersistStoreWrite SqlBackend where ent = entityDef vals valss = map mkInsertValues vals - insertMany_ vals0 = runChunked (length $ entityFields t) insertMany_' vals0 + insertMany_ vals0 = runChunked (length $ getEntityFields t) insertMany_' vals0 where t = entityDef vals0 insertMany_' vals = do @@ -234,9 +243,9 @@ instance PersistStoreWrite SqlBackend where [ "INSERT INTO " , connEscapeTableName conn t , "(" - , T.intercalate "," $ map (connEscapeFieldName conn . fieldDB) $ entityFields t + , T.intercalate "," $ map (connEscapeFieldName conn . 
fieldDB) $ getEntityFields t , ") VALUES (" - , T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," $ map (const "?") (entityFields t) + , T.intercalate "),(" $ replicate (length valss) $ T.intercalate "," $ map (const "?") (getEntityFields t) , ")" ] rawExecute sql (concat valss) @@ -249,7 +258,7 @@ instance PersistStoreWrite SqlBackend where [ "UPDATE " , connEscapeTableName conn t , " SET " - , T.intercalate "," (map (go conn . fieldDB) $ entityFields t) + , T.intercalate "," (map (go conn . fieldDB) $ getEntityFields t) , " WHERE " , wher ] @@ -322,7 +331,7 @@ instance PersistStoreRead SqlBackend where getMany ks@(k:_)= do conn <- ask let t = entityDef . dummyFromKey $ k - let cols = commaSeparated . entityColumnNames t + let cols = commaSeparated . Foldable.toList . keyAndEntityColumnNames t let wher = whereStmtForKeys conn ks let sql = T.concat [ "SELECT " @@ -334,7 +343,8 @@ instance PersistStoreRead SqlBackend where ] let parse vals = case parseEntityValues t vals of - Left s -> liftIO $ throwIO $ PersistMarshalError s + Left s -> liftIO $ throwIO $ + PersistMarshalError ("getBy: " <> s) Right row -> return row withRawQuery sql (Foldable.foldMap keyToValues ks) $ do es <- CL.mapM parse .| CL.consume @@ -360,7 +370,7 @@ insrepHelper :: (MonadIO m, PersistEntity val) insrepHelper _ [] = return () insrepHelper command es = do conn <- ask - let columnNames = keyAndEntityColumnNames entDef conn + let columnNames = Foldable.toList $ keyAndEntityColumnNames entDef conn rawExecute (sql conn columnNames) vals where entDef = entityDef $ map entityVal es @@ -371,7 +381,7 @@ insrepHelper command es = do , "(" , T.intercalate "," columnNames , ") VALUES (" - , T.intercalate "),(" $ replicate (length es) $ T.intercalate "," $ map (const "?") columnNames + , T.intercalate "),(" $ replicate (length es) $ T.intercalate "," $ fmap (const "?") columnNames , ")" ] vals = Foldable.foldMap entityValues es diff --git 
a/persistent/Database/Persist/Sql/Orphan/PersistUnique.hs b/persistent/Database/Persist/Sql/Orphan/PersistUnique.hs index f9a0c62d3..27c01be99 100644 --- a/persistent/Database/Persist/Sql/Orphan/PersistUnique.hs +++ b/persistent/Database/Persist/Sql/Orphan/PersistUnique.hs @@ -10,14 +10,14 @@ import Control.Monad.Trans.Reader (ask) import qualified Data.Conduit.List as CL import Data.Function (on) import Data.List (nubBy) -import qualified Data.List.NonEmpty as NEL import Data.Monoid (mappend) import qualified Data.Text as T +import Data.Foldable (toList) import Database.Persist import Database.Persist.Class.PersistUnique (defaultUpsertBy, defaultPutMany, persistUniqueKeyValues) -import Database.Persist.Sql.Types +import Database.Persist.Sql.Types.Internal import Database.Persist.Sql.Raw import Database.Persist.Sql.Orphan.PersistStore (withRawQuery) import Database.Persist.Sql.Util (dbColumns, parseEntityValues, updatePersistValue, mkUpdateText') @@ -32,7 +32,7 @@ instance PersistUniqueWrite SqlBackend where [] -> defaultUpsertBy uniqueKey record updates _:_ -> do let upds = T.intercalate "," $ map mkUpdateText updates - sql = upsertSql t (NEL.fromList $ persistUniqueToFieldNames uniqueKey) upds + sql = upsertSql t (persistUniqueToFieldNames uniqueKey) upds vals = map toPersistValue (toPersistFields record) ++ map updatePersistValue updates ++ unqs uniqueKey @@ -51,7 +51,7 @@ instance PersistUniqueWrite SqlBackend where rawExecute sql' vals where t = entityDef $ dummyFromUnique uniq - go = map snd . persistUniqueToFieldNames + go = toList . fmap snd . persistUniqueToFieldNames go' conn x = connEscapeFieldName conn x `mappend` "=?" 
sql conn = T.concat @@ -88,7 +88,7 @@ instance PersistUniqueRead SqlBackend where let sql = T.concat [ "SELECT " - , T.intercalate "," $ dbColumns conn t + , T.intercalate "," $ toList $ dbColumns conn t , " FROM " , connEscapeTableName conn t , " WHERE " @@ -109,7 +109,7 @@ instance PersistUniqueRead SqlBackend where T.intercalate " AND " $ map (go conn) $ toFieldNames' uniq go conn x = connEscapeFieldName conn x `mappend` "=?" t = entityDef $ dummyFromUnique uniq - toFieldNames' = map snd . persistUniqueToFieldNames + toFieldNames' = toList . fmap snd . persistUniqueToFieldNames instance PersistUniqueRead SqlReadBackend where getBy uniq = withBaseBackend $ getBy uniq diff --git a/persistent/Database/Persist/Sql/Raw.hs b/persistent/Database/Persist/Sql/Raw.hs index b3bd2b72e..4de7f0ef9 100644 --- a/persistent/Database/Persist/Sql/Raw.hs +++ b/persistent/Database/Persist/Sql/Raw.hs @@ -16,6 +16,7 @@ import qualified Data.Text as T import Database.Persist import Database.Persist.Sql.Types +import Database.Persist.Sql.Types.Internal import Database.Persist.Sql.Class rawQuery :: (MonadResource m, MonadReader env m, BackendCompatible SqlBackend env) @@ -66,8 +67,8 @@ rawExecuteCount sql vals = do return res getStmt - :: (MonadIO m, BackendCompatible SqlBackend backend) - => Text -> ReaderT backend m Statement + :: (MonadIO m, MonadReader backend m, BackendCompatible SqlBackend backend) + => Text -> m Statement getStmt sql = do conn <- projectBackend `liftM` ask liftIO $ getStmtConn conn sql diff --git a/persistent/Database/Persist/Sql/Run.hs b/persistent/Database/Persist/Sql/Run.hs index 2bc79b3ea..d66c72a9c 100644 --- a/persistent/Database/Persist/Sql/Run.hs +++ b/persistent/Database/Persist/Sql/Run.hs @@ -17,7 +17,7 @@ import qualified Data.Text as T import Database.Persist.Class.PersistStore import Database.Persist.Sql.Types -import Database.Persist.Sql.Types.Internal (IsolationLevel) +import Database.Persist.Sql.Types.Internal import Database.Persist.Sql.Raw -- | 
Get a connection from the pool, run the given action, and then return the diff --git a/persistent/Database/Persist/Sql/Types.hs b/persistent/Database/Persist/Sql/Types.hs index 9d5e870d7..8df81a30f 100644 --- a/persistent/Database/Persist/Sql/Types.hs +++ b/persistent/Database/Persist/Sql/Types.hs @@ -1,6 +1,6 @@ module Database.Persist.Sql.Types ( module Database.Persist.Sql.Types - , SqlBackend (..), SqlReadBackend (..), SqlWriteBackend (..) + , SqlBackend, SqlReadBackend (..), SqlWriteBackend (..) , Statement (..), LogFunc, InsertSqlResult (..) , readToUnknown, readToWrite, writeToUnknown , SqlBackendCanRead, SqlBackendCanWrite, SqlReadT, SqlWriteT, IsSqlBackend @@ -12,15 +12,15 @@ import Database.Persist.Types.Base (FieldCascade) import Control.Exception (Exception(..)) import Control.Monad.Logger (NoLoggingT) -import Control.Monad.Trans.Reader (ReaderT (..)) +import Control.Monad.Trans.Reader (ReaderT(..)) import Control.Monad.Trans.Resource (ResourceT) import Control.Monad.Trans.Writer (WriterT) import Data.Pool (Pool) import Data.Text (Text, unpack) -import Database.Persist.Types -import Database.Persist.Sql.Types.Internal import Data.Time (NominalDiffTime) +import Database.Persist.Sql.Types.Internal +import Database.Persist.Types data Column = Column { cName :: !FieldNameDB diff --git a/persistent/Database/Persist/Sql/Types/Internal.hs b/persistent/Database/Persist/Sql/Types/Internal.hs index 9ba89cde3..caa3b998f 100644 --- a/persistent/Database/Persist/Sql/Types/Internal.hs +++ b/persistent/Database/Persist/Sql/Types/Internal.hs @@ -1,5 +1,11 @@ {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE RankNTypes #-} +{-# language RecordWildCards #-} +{-# language DuplicateRecordFields #-} + +-- | Breaking changes to this module are not reflected in the major version +-- number. Prefer to import from "Database.Persist.Sql" instead. If you neeed +-- something from this module, please file an issue on GitHub. 
module Database.Persist.Sql.Types.Internal ( HasPersistBackend (..) , IsPersistBackend (..) @@ -21,188 +27,28 @@ module Database.Persist.Sql.Types.Internal , IsSqlBackend ) where -import Data.List.NonEmpty (NonEmpty(..)) -import Control.Monad.IO.Class (MonadIO (..)) -import Control.Monad.Logger (LogSource, LogLevel, Loc) import Control.Monad.Trans.Class (lift) import Control.Monad.Trans.Reader (ReaderT, runReaderT, ask) -import Data.Acquire (Acquire) -import Data.Conduit (ConduitM) -import Data.Int (Int64) -import Data.IORef (IORef) -import Data.Map (Map) import Data.Monoid ((<>)) -import Data.String (IsString) -import Data.Text (Text) -import System.Log.FastLogger (LogStr) import Database.Persist.Class - ( HasPersistBackend (..) - , PersistQueryRead, PersistQueryWrite - , PersistStoreRead, PersistStoreWrite - , PersistUniqueRead, PersistUniqueWrite - , BackendCompatible(..) - ) + ( HasPersistBackend (..) + , PersistQueryRead, PersistQueryWrite + , PersistStoreRead, PersistStoreWrite + , PersistUniqueRead, PersistUniqueWrite + , BackendCompatible(..) + ) import Database.Persist.Class.PersistStore (IsPersistBackend (..)) -import Database.Persist.Types - -type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO () - -data InsertSqlResult = ISRSingle Text - | ISRInsertGet Text Text - | ISRManyKeys Text [PersistValue] - -data Statement = Statement - { stmtFinalize :: IO () - , stmtReset :: IO () - , stmtExecute :: [PersistValue] -> IO Int64 - , stmtQuery :: forall m. 
MonadIO m - => [PersistValue] - -> Acquire (ConduitM () [PersistValue] m ()) - } - --- | Please refer to the documentation for the database in question for a full --- overview of the semantics of the varying isloation levels -data IsolationLevel = ReadUncommitted - | ReadCommitted - | RepeatableRead - | Serializable - deriving (Show, Eq, Enum, Ord, Bounded) - -makeIsolationLevelStatement :: (Monoid s, IsString s) => IsolationLevel -> s -makeIsolationLevelStatement l = "SET TRANSACTION ISOLATION LEVEL " <> case l of - ReadUncommitted -> "READ UNCOMMITTED" - ReadCommitted -> "READ COMMITTED" - RepeatableRead -> "REPEATABLE READ" - Serializable -> "SERIALIZABLE" - --- | A 'SqlBackend' represents a handle or connection to a database. It --- contains functions and values that allow databases to have more --- optimized implementations, as well as references that benefit --- performance and sharing. --- --- A 'SqlBackend' is *not* thread-safe. You should not assume that --- a 'SqlBackend' can be shared among threads and run concurrent queries. --- This *will* result in problems. Instead, you should create a @'Pool' --- 'SqlBackend'@, known as a 'ConnectionPool', and pass that around in --- multi-threaded applications. --- --- To run actions in the @persistent@ library, you should use the --- 'runSqlConn' function. If you're using a multithreaded application, use --- the 'runSqlPool' function. -data SqlBackend = SqlBackend - { connPrepare :: Text -> IO Statement - -- ^ This function should prepare a 'Statement' in the target database, - -- which should allow for efficient query reuse. - , connInsertSql :: EntityDef -> [PersistValue] -> InsertSqlResult - -- ^ This function generates the SQL and values necessary for - -- performing an insert against the database. - , connInsertManySql :: Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) - -- ^ SQL for inserting many rows and returning their primary keys, for - -- backends that support this functionality. 
If 'Nothing', rows will be - -- inserted one-at-a-time using 'connInsertSql'. - , connUpsertSql :: Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) - -- ^ Some databases support performing UPSERT _and_ RETURN entity - -- in a single call. - -- - -- This field when set will be used to generate the UPSERT+RETURN sql given - -- * an entity definition - -- * updates to be run on unique key(s) collision - -- - -- When left as 'Nothing', we find the unique key from entity def before - -- * trying to fetch an entity by said key - -- * perform an update when result found, else issue an insert - -- * return new entity from db - -- - -- @since 2.6 - , connPutManySql :: Maybe (EntityDef -> Int -> Text) - -- ^ Some databases support performing bulk UPSERT, specifically - -- "insert or replace many records" in a single call. - -- - -- This field when set, given - -- * an entity definition - -- * number of records to be inserted - -- should produce a PUT MANY sql with placeholders for records - -- - -- When left as 'Nothing', we default to using 'defaultPutMany'. - -- - -- @since 2.8.1 - , connStmtMap :: IORef (Map Text Statement) - -- ^ A reference to the cache of statements. 'Statement's are keyed by - -- the 'Text' queries that generated them. - , connClose :: IO () - -- ^ Close the underlying connection. - , connMigrateSql - :: [EntityDef] - -> (Text -> IO Statement) - -> EntityDef - -> IO (Either [Text] [(Bool, Text)]) - -- ^ This function returns the migrations required to include the - -- 'EntityDef' parameter in the @['EntityDef']@ database. This might - -- include creating a new table if the entity is not present, or - -- altering an existing table if it is. - , connBegin :: (Text -> IO Statement) -> Maybe IsolationLevel -> IO () - -- ^ A function to begin a transaction for the underlying database. - , connCommit :: (Text -> IO Statement) -> IO () - -- ^ A function to commit a transaction to the underlying database. 
- , connRollback :: (Text -> IO Statement) -> IO () - -- ^ A function to roll back a transaction on the underlying database. - , connEscapeFieldName :: FieldNameDB -> Text - -- ^ A function to extract and escape the name of the column corresponding - -- to the provided field. - -- - -- @since 2.12.0.0 - , connEscapeTableName :: EntityDef -> Text - -- ^ A function to extract and escape the name of the table corresponding - -- to the provided entity. PostgreSQL uses this to support schemas. - -- - -- @since 2.12.0.0 - , connEscapeRawName :: Text -> Text - -- ^ A function to escape raw DB identifiers. MySQL uses backticks, while - -- PostgreSQL uses quotes, and so on. - -- - -- @since 2.12.0.0 - , connNoLimit :: Text - , connRDBMS :: Text - -- ^ A tag displaying what database the 'SqlBackend' is for. Can be - -- used to differentiate features in downstream libraries for different - -- database backends. - , connLimitOffset :: (Int,Int) -> Bool -> Text -> Text - -- ^ Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that - -- LIMIT/OFFSET is problematic for performance, and indexed range - -- queries are the superior way to offer pagination. - , connLogFunc :: LogFunc - -- ^ A log function for the 'SqlBackend' to use. - , connMaxParams :: Maybe Int - -- ^ Some databases (probably only Sqlite) have a limit on how - -- many question-mark parameters may be used in a statement - -- - -- @since 2.6.1 - , connRepsertManySql :: Maybe (EntityDef -> Int -> Text) - -- ^ Some databases support performing bulk an atomic+bulk INSERT where - -- constraint conflicting entities can replace existing entities. - -- - -- This field when set, given - -- * an entity definition - -- * number of records to be inserted - -- should produce a INSERT sql with placeholders for primary+record fields - -- - -- When left as 'Nothing', we default to using 'defaultRepsertMany'. 
- -- - -- @since 2.9.0 - } - -instance HasPersistBackend SqlBackend where - type BaseBackend SqlBackend = SqlBackend - persistBackend = id - -instance IsPersistBackend SqlBackend where - mkPersistBackend = id +import Database.Persist.SqlBackend.Internal +import Database.Persist.SqlBackend.Internal.InsertSqlResult +import Database.Persist.SqlBackend.Internal.MkSqlBackend +import Database.Persist.SqlBackend.Internal.Statement +import Database.Persist.SqlBackend.Internal.IsolationLevel -- | An SQL backend which can only handle read queries -- -- The constructor was exposed in 2.10.0. -newtype SqlReadBackend = SqlReadBackend { unSqlReadBackend :: SqlBackend } +newtype SqlReadBackend = SqlReadBackend { unSqlReadBackend :: SqlBackend } instance HasPersistBackend SqlReadBackend where type BaseBackend SqlReadBackend = SqlBackend @@ -260,4 +106,7 @@ type SqlReadT m a = forall backend. (SqlBackendCanRead backend) => ReaderT backe type SqlWriteT m a = forall backend. (SqlBackendCanWrite backend) => ReaderT backend m a -- | A backend which is a wrapper around @SqlBackend@. 
-type IsSqlBackend backend = (IsPersistBackend backend, BaseBackend backend ~ SqlBackend) +type IsSqlBackend backend = + ( IsPersistBackend backend + , BaseBackend backend ~ SqlBackend + ) diff --git a/persistent/Database/Persist/Sql/Util.hs b/persistent/Database/Persist/Sql/Util.hs index d68e55320..e9a61ecf1 100644 --- a/persistent/Database/Persist/Sql/Util.hs +++ b/persistent/Database/Persist/Sql/Util.hs @@ -1,12 +1,12 @@ +{-# LANGUAGE ScopedTypeVariables #-} + module Database.Persist.Sql.Util ( parseEntityValues - , entityColumnNames , keyAndEntityColumnNames , entityColumnCount , isIdField - , hasCompositeKey - , hasCompositePrimaryKey , hasNaturalKey + , hasCompositePrimaryKey , dbIdColumns , dbIdColumnsEsc , dbColumns @@ -20,40 +20,46 @@ module Database.Persist.Sql.Util , mkInsertPlaceholders ) where +import Data.List.NonEmpty (NonEmpty(..)) import qualified Data.Maybe as Maybe import Data.Monoid ((<>)) import Data.Text (Text, pack) import qualified Data.Text as T -import Database.Persist ( - Entity(Entity), EntityDef, EntityField, FieldNameHS(FieldNameHS) - , PersistEntity(..), PersistValue - , keyFromValues, fromPersistValues, fieldDB, entityId, entityPrimary - , entityFields, entityKeyFields, fieldHaskell, compositeFields, persistFieldDef - , keyAndEntityFields, toPersistValue, FieldNameDB, Update(..), PersistUpdate(..) - , FieldDef(..) - ) +import Database.Persist + ( Entity(Entity) + , EntityDef + , EntityField + , FieldDef(..) + , FieldNameDB + , FieldNameHS(FieldNameHS) + , PersistEntity(..) + , PersistUpdate(..) + , PersistValue + , Update(..) 
+ , compositeFields + , entityPrimary + , fieldDB + , fieldHaskell + , fromPersistValues + , getEntityFields + , getEntityKeyFields + , keyAndEntityFields + , keyFromValues + , persistFieldDef + , toPersistValue + ) -import Database.Persist.Sql.Types (Sql, SqlBackend, connEscapeFieldName) +import Database.Persist.Sql.Types (Sql) +import Database.Persist.SqlBackend.Internal (SqlBackend(..)) -entityColumnNames :: EntityDef -> SqlBackend -> [Sql] -entityColumnNames ent conn = - (if hasCompositeKey ent - then [] else [connEscapeFieldName conn . fieldDB $ entityId ent]) - <> map (connEscapeFieldName conn . fieldDB) (entityFields ent) - -keyAndEntityColumnNames :: EntityDef -> SqlBackend -> [Sql] -keyAndEntityColumnNames ent conn = map (connEscapeFieldName conn . fieldDB) (keyAndEntityFields ent) +keyAndEntityColumnNames :: EntityDef -> SqlBackend -> NonEmpty Sql +keyAndEntityColumnNames ent conn = + fmap (connEscapeFieldName conn . fieldDB) (keyAndEntityFields ent) entityColumnCount :: EntityDef -> Int -entityColumnCount e = length (entityFields e) - + if hasCompositeKey e then 0 else 1 - -{-# DEPRECATED hasCompositeKey "hasCompositeKey is misleading - it returns True if the entity is defined with the Primary keyword. See issue #685 for discussion. \n If you want the same behavior, use 'hasNaturalKey'. If you want to know if the key has multiple fields, use 'hasCompositePrimaryKey'. This function will be removed in the next major version." #-} --- | Deprecated as of 2.11. See 'hasNaturalKey' or 'hasCompositePrimaryKey' --- for replacements. -hasCompositeKey :: EntityDef -> Bool -hasCompositeKey = Maybe.isJust . entityPrimary +entityColumnCount e = length (getEntityFields e) + + if hasNaturalKey e then 0 else 1 -- | Returns 'True' if the entity has a natural key defined with the -- Primary keyword. 
@@ -137,35 +143,33 @@ hasCompositePrimaryKey ed = case entityPrimary ed of Just cdef -> case compositeFields cdef of - (_ : _ : _) -> + (_ :| _ : _) -> True _ -> False Nothing -> False -dbIdColumns :: SqlBackend -> EntityDef -> [Text] +dbIdColumns :: SqlBackend -> EntityDef -> NonEmpty Text dbIdColumns conn = dbIdColumnsEsc (connEscapeFieldName conn) -dbIdColumnsEsc :: (FieldNameDB -> Text) -> EntityDef -> [Text] -dbIdColumnsEsc esc t = map (esc . fieldDB) $ entityKeyFields t +dbIdColumnsEsc :: (FieldNameDB -> Text) -> EntityDef -> NonEmpty Text +dbIdColumnsEsc esc t = fmap (esc . fieldDB) $ getEntityKeyFields t -dbColumns :: SqlBackend -> EntityDef -> [Text] -dbColumns conn t = case entityPrimary t of - Just _ -> flds - Nothing -> escapeColumn (entityId t) : flds +dbColumns :: SqlBackend -> EntityDef -> NonEmpty Text +dbColumns conn = + fmap escapeColumn . keyAndEntityFields where escapeColumn = connEscapeFieldName conn . fieldDB - flds = map escapeColumn (entityFields t) parseEntityValues :: PersistEntity record => EntityDef -> [PersistValue] -> Either Text (Entity record) parseEntityValues t vals = case entityPrimary t of Just pdef -> - let pks = map fieldHaskell $ compositeFields pdef + let pks = fmap fieldHaskell $ compositeFields pdef keyvals = map snd . filter ((`elem` pks) . fst) - $ zip (map fieldHaskell $ entityFields t) vals + $ zip (map fieldHaskell $ getEntityFields t) vals in fromPersistValuesComposite' keyvals vals Nothing -> fromPersistValues' vals where @@ -188,7 +192,10 @@ parseEntityValues t vals = Right key -> Right (Entity key xs') -isIdField :: PersistEntity record => EntityField record typ -> Bool +isIdField + :: forall record typ. (PersistEntity record) + => EntityField record typ + -> Bool isIdField f = fieldHaskell (persistFieldDef f) == FieldNameHS "Id" -- | Gets the 'FieldDef' for an 'Update'. @@ -236,7 +243,7 @@ mkInsertValues -> [PersistValue] mkInsertValues entity = Maybe.catMaybes - . zipWith redactGeneratedCol (entityFields . 
entityDef $ Just entity) + . zipWith redactGeneratedCol (getEntityFields . entityDef $ Just entity) . map toPersistValue $ toPersistFields entity where @@ -258,7 +265,7 @@ mkInsertPlaceholders -- ^ An `escape` function -> [(Text, Text)] mkInsertPlaceholders ed escape = - Maybe.mapMaybe redactGeneratedCol (entityFields ed) + Maybe.mapMaybe redactGeneratedCol (getEntityFields ed) where redactGeneratedCol fd = case fieldGenerated fd of Nothing -> diff --git a/persistent/Database/Persist/SqlBackend.hs b/persistent/Database/Persist/SqlBackend.hs new file mode 100644 index 000000000..2c3a2cf0d --- /dev/null +++ b/persistent/Database/Persist/SqlBackend.hs @@ -0,0 +1,190 @@ +-- | This module contains types and information necessary for a SQL database. +-- Database support libraries, like @persistent-postgresql@, will be responsible +-- for constructing these values. +module Database.Persist.SqlBackend + ( -- * The type and construction + SqlBackend + , mkSqlBackend + , MkSqlBackendArgs(..) + -- * Utilities + + -- $utilities + + -- ** SqlBackend Getters + , getEscapedFieldName + , getEscapedRawName + , getEscapeRawNameFunction + , getConnLimitOffset + , getConnUpsertSql + -- ** SqlBackend Setters + , setConnMaxParams + , setConnRepsertManySql + , setConnInsertManySql + , setConnUpsertSql + , setConnPutManySql + ) where + +import Control.Monad.Reader +import Data.Text (Text) +import Database.Persist.Class.PersistStore (BackendCompatible(..)) +import Database.Persist.SqlBackend.Internal +import qualified Database.Persist.SqlBackend.Internal as SqlBackend + (SqlBackend(..)) +import Database.Persist.SqlBackend.Internal.MkSqlBackend as Mk (MkSqlBackendArgs(..)) +import Database.Persist.Types.Base +import Database.Persist.Names +import Database.Persist.SqlBackend.Internal.InsertSqlResult +import Data.List.NonEmpty (NonEmpty) + +-- $utilities +-- +-- The functions exported here are a bit more general than the record accessors. 
+-- The easiest way to use them is to provide the 'SqlBackend' directly to the +-- function. However, you can also use them in a 'ReaderT' context, and you can +-- even use them with any @backend@ type tht has a @'BackendCompatible' +-- 'SqlBackend' backend@ instance. + +-- | This function can be used directly with a 'SqlBackend' to escape +-- a 'FieldNameDB'. +-- +-- @ +-- let conn :: SqlBackend +-- getEscapedFieldName (FieldNameDB "asdf") conn +-- @ +-- +-- Alternatively, you can use it in a @'ReaderT' 'SqlBackend'@ context, like +-- 'SqlPersistT': +-- +-- @ +-- query :: SqlPersistM Text +-- query = do +-- field <- getEscapedFieldName (FieldNameDB "asdf") +-- pure field +-- @ +-- +-- @since 2.13.0.0 +getEscapedFieldName + :: (BackendCompatible SqlBackend backend, MonadReader backend m) + => FieldNameDB -> m Text +getEscapedFieldName fieldName = do + func <- asks (SqlBackend.connEscapeFieldName . projectBackend) + pure (func fieldName) + +-- | This function can be used directly with a 'SqlBackend' to escape +-- a raw 'Text'. +-- +-- @ +-- let conn :: SqlBackend +-- getEscapedRawName (FieldNameDB "asdf") conn +-- @ +-- +-- Alternatively, you can use it in a @'ReaderT' 'SqlBackend'@ context, like +-- 'SqlPersistT': +-- +-- @ +-- query :: SqlPersistM Text +-- query = do +-- field <- getEscapedRawName (FieldNameDB "asdf") +-- pure field +-- @ +-- +-- @since 2.13.0.0 +getEscapedRawName + :: (BackendCompatible SqlBackend backend, MonadReader backend m) + => Text -> m Text +getEscapedRawName name = do + func <- getEscapeRawNameFunction + pure (func name) + +-- | Return the function for escaping a raw name. +-- +-- @since 2.13.0.0 +getEscapeRawNameFunction + :: (BackendCompatible SqlBackend backend, MonadReader backend m) + => m (Text -> Text) +getEscapeRawNameFunction = do + asks (SqlBackend.connEscapeRawName . projectBackend) + +-- | Decorate the given SQL query with the @(LIMIT, OFFSET)@ specified. 
+-- +-- @since 2.13.0.0 +getConnLimitOffset + :: (BackendCompatible SqlBackend backend, MonadReader backend m) + => (Int, Int) + -- ^ The @(LIMIT, OFFSET)@ to put on the query. + -> Text + -- ^ The SQL query that the LIMIT/OFFSET clause will be attached to. + -> m Text +getConnLimitOffset limitOffset sql = do + func <- asks (SqlBackend.connLimitOffset . projectBackend) + pure $ func limitOffset sql + +-- | Retrieve the function for generating an upsert statement, if the backend +-- supports it. +-- +-- @since 2.13.0.0 +getConnUpsertSql + :: (BackendCompatible SqlBackend backend, MonadReader backend m) + => m (Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text)) +getConnUpsertSql = do + asks (SqlBackend.connUpsertSql . projectBackend) + + +-- | Set the maximum parameters that may be issued in a given SQL query. This +-- should be used only if the database backend have this limitation. +-- +-- @since 2.13.0.0 +setConnMaxParams + :: Int + -> SqlBackend + -> SqlBackend +setConnMaxParams i sb = + sb { connMaxParams = Just i } + +-- | Set the 'connRepsertManySql' field on the 'SqlBackend'. This should only be +-- set by the database backend library. If this is not set, a slow default will +-- be used. +-- +-- @since 2.13.0.0 +setConnRepsertManySql + :: (EntityDef -> Int -> Text) + -> SqlBackend + -> SqlBackend +setConnRepsertManySql mkQuery sb = + sb { connRepsertManySql = Just mkQuery } + +-- | Set the 'connInsertManySql' field on the 'SqlBackend'. This should only be +-- used by the database backend library to provide an efficient implementation +-- of a bulk insert function. If this is not set, a slow default will be used. +-- +-- @since 2.13.0.0 +setConnInsertManySql + :: (EntityDef -> [[PersistValue]] -> InsertSqlResult) + -> SqlBackend + -> SqlBackend +setConnInsertManySql mkQuery sb = + sb { connInsertManySql = Just mkQuery } + +-- | Set the 'connUpsertSql' field on the 'SqlBackend'. 
This should only be used +-- by the database backend library to provide an efficient implementation of +-- a bulk insert function. If this is not set, a slow default will be used. +-- +-- @since 2.13.0.0 +setConnUpsertSql + :: (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) + -> SqlBackend + -> SqlBackend +setConnUpsertSql mkQuery sb = + sb { connUpsertSql = Just mkQuery } + +-- | Set the 'connPutManySql field on the 'SqlBackend'. This should only be used +-- by the database backend library to provide an efficient implementation of +-- a bulk insert function. If this is not set, a slow default will be used. +-- +-- @since 2.13.0.0 +setConnPutManySql + :: (EntityDef -> Int -> Text) + -> SqlBackend + -> SqlBackend +setConnPutManySql mkQuery sb = + sb { connPutManySql = Just mkQuery } diff --git a/persistent/Database/Persist/SqlBackend/Internal.hs b/persistent/Database/Persist/SqlBackend/Internal.hs new file mode 100644 index 000000000..c059845ad --- /dev/null +++ b/persistent/Database/Persist/SqlBackend/Internal.hs @@ -0,0 +1,159 @@ +{-# language RecordWildCards #-} +{-# language RankNTypes #-} + +module Database.Persist.SqlBackend.Internal where + +import Data.Map (Map) +import Data.List.NonEmpty (NonEmpty) +import Data.Text (Text) +import Database.Persist.Class.PersistStore +import Database.Persist.Types.Base +import Database.Persist.Names +import Data.IORef +import Database.Persist.SqlBackend.Internal.MkSqlBackend +import Database.Persist.SqlBackend.Internal.Statement +import Database.Persist.SqlBackend.Internal.InsertSqlResult +import Database.Persist.SqlBackend.Internal.IsolationLevel + +-- | A 'SqlBackend' represents a handle or connection to a database. It +-- contains functions and values that allow databases to have more +-- optimized implementations, as well as references that benefit +-- performance and sharing. +-- +-- Instead of using the 'SqlBackend' constructor directly, use the +-- 'mkSqlBackend' function. 
+-- +-- A 'SqlBackend' is *not* thread-safe. You should not assume that +-- a 'SqlBackend' can be shared among threads and run concurrent queries. +-- This *will* result in problems. Instead, you should create a @'Pool' +-- 'SqlBackend'@, known as a 'ConnectionPool', and pass that around in +-- multi-threaded applications. +-- +-- To run actions in the @persistent@ library, you should use the +-- 'runSqlConn' function. If you're using a multithreaded application, use +-- the 'runSqlPool' function. +data SqlBackend = SqlBackend + { connPrepare :: Text -> IO Statement + -- ^ This function should prepare a 'Statement' in the target database, + -- which should allow for efficient query reuse. + , connInsertSql :: EntityDef -> [PersistValue] -> InsertSqlResult + -- ^ This function generates the SQL and values necessary for + -- performing an insert against the database. + , connInsertManySql :: Maybe (EntityDef -> [[PersistValue]] -> InsertSqlResult) + -- ^ SQL for inserting many rows and returning their primary keys, for + -- backends that support this functionality. If 'Nothing', rows will be + -- inserted one-at-a-time using 'connInsertSql'. + , connUpsertSql :: Maybe (EntityDef -> NonEmpty (FieldNameHS, FieldNameDB) -> Text -> Text) + -- ^ Some databases support performing UPSERT _and_ RETURN entity + -- in a single call. + -- + -- This field when set will be used to generate the UPSERT+RETURN sql given + -- * an entity definition + -- * updates to be run on unique key(s) collision + -- + -- When left as 'Nothing', we find the unique key from entity def before + -- * trying to fetch an entity by said key + -- * perform an update when result found, else issue an insert + -- * return new entity from db + -- + -- @since 2.6 + , connPutManySql :: Maybe (EntityDef -> Int -> Text) + -- ^ Some databases support performing bulk UPSERT, specifically + -- "insert or replace many records" in a single call. 
+ -- + -- This field when set, given + -- * an entity definition + -- * number of records to be inserted + -- should produce a PUT MANY sql with placeholders for records + -- + -- When left as 'Nothing', we default to using 'defaultPutMany'. + -- + -- @since 2.8.1 + , connStmtMap :: IORef (Map Text Statement) + -- ^ A reference to the cache of statements. 'Statement's are keyed by + -- the 'Text' queries that generated them. + , connClose :: IO () + -- ^ Close the underlying connection. + , connMigrateSql + :: [EntityDef] + -> (Text -> IO Statement) + -> EntityDef + -> IO (Either [Text] [(Bool, Text)]) + -- ^ This function returns the migrations required to include the + -- 'EntityDef' parameter in the @['EntityDef']@ database. This might + -- include creating a new table if the entity is not present, or + -- altering an existing table if it is. + , connBegin :: (Text -> IO Statement) -> Maybe IsolationLevel -> IO () + -- ^ A function to begin a transaction for the underlying database. + , connCommit :: (Text -> IO Statement) -> IO () + -- ^ A function to commit a transaction to the underlying database. + , connRollback :: (Text -> IO Statement) -> IO () + -- ^ A function to roll back a transaction on the underlying database. + , connEscapeFieldName :: FieldNameDB -> Text + -- ^ A function to extract and escape the name of the column corresponding + -- to the provided field. + -- + -- @since 2.12.0.0 + , connEscapeTableName :: EntityDef -> Text + -- ^ A function to extract and escape the name of the table corresponding + -- to the provided entity. PostgreSQL uses this to support schemas. + -- + -- @since 2.12.0.0 + , connEscapeRawName :: Text -> Text + -- ^ A function to escape raw DB identifiers. MySQL uses backticks, while + -- PostgreSQL uses quotes, and so on. + -- + -- @since 2.12.0.0 + , connNoLimit :: Text + , connRDBMS :: Text + -- ^ A tag displaying what database the 'SqlBackend' is for. 
Can be + -- used to differentiate features in downstream libraries for different + -- database backends. + , connLimitOffset :: (Int,Int) -> Text -> Text + -- ^ Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that + -- LIMIT/OFFSET is problematic for performance, and indexed range + -- queries are the superior way to offer pagination. + , connLogFunc :: LogFunc + -- ^ A log function for the 'SqlBackend' to use. + , connMaxParams :: Maybe Int + -- ^ Some databases (probably only Sqlite) have a limit on how + -- many question-mark parameters may be used in a statement + -- + -- @since 2.6.1 + , connRepsertManySql :: Maybe (EntityDef -> Int -> Text) + -- ^ Some databases support performing bulk an atomic+bulk INSERT where + -- constraint conflicting entities can replace existing entities. + -- + -- This field when set, given + -- * an entity definition + -- * number of records to be inserted + -- should produce a INSERT sql with placeholders for primary+record fields + -- + -- When left as 'Nothing', we default to using 'defaultRepsertMany'. + -- + -- @since 2.9.0 + } + +-- | A function for creating a value of the 'SqlBackend' type. You should prefer +-- to use this instead of the constructor for 'SqlBackend', because default +-- values for this will be provided for new fields on the record when new +-- functionality is added. +-- +-- @since 2.13.0.0 +mkSqlBackend :: MkSqlBackendArgs -> SqlBackend +mkSqlBackend MkSqlBackendArgs {..} = + SqlBackend + { connMaxParams = Nothing + , connRepsertManySql = Nothing + , connPutManySql = Nothing + , connUpsertSql = Nothing + , connInsertManySql = Nothing + , .. 
+ } + +instance HasPersistBackend SqlBackend where + type BaseBackend SqlBackend = SqlBackend + persistBackend = id + +instance IsPersistBackend SqlBackend where + mkPersistBackend = id diff --git a/persistent/Database/Persist/SqlBackend/Internal/InsertSqlResult.hs b/persistent/Database/Persist/SqlBackend/Internal/InsertSqlResult.hs new file mode 100644 index 000000000..90a69528b --- /dev/null +++ b/persistent/Database/Persist/SqlBackend/Internal/InsertSqlResult.hs @@ -0,0 +1,9 @@ +module Database.Persist.SqlBackend.Internal.InsertSqlResult where + +import Database.Persist.Types.Base (PersistValue) +import Data.Text (Text) + +data InsertSqlResult + = ISRSingle Text + | ISRInsertGet Text Text + | ISRManyKeys Text [PersistValue] diff --git a/persistent/Database/Persist/SqlBackend/Internal/IsolationLevel.hs b/persistent/Database/Persist/SqlBackend/Internal/IsolationLevel.hs new file mode 100644 index 000000000..d4c9926bd --- /dev/null +++ b/persistent/Database/Persist/SqlBackend/Internal/IsolationLevel.hs @@ -0,0 +1,19 @@ +module Database.Persist.SqlBackend.Internal.IsolationLevel where + +import Data.String (IsString(..)) +import Data.Monoid ((<>)) -- TODO: remove when GHC-8.2 support is dropped + +-- | Please refer to the documentation for the database in question for a full +-- overview of the semantics of the varying isloation levels +data IsolationLevel = ReadUncommitted + | ReadCommitted + | RepeatableRead + | Serializable + deriving (Show, Eq, Enum, Ord, Bounded) + +makeIsolationLevelStatement :: (Monoid s, IsString s) => IsolationLevel -> s +makeIsolationLevelStatement l = "SET TRANSACTION ISOLATION LEVEL " <> case l of + ReadUncommitted -> "READ UNCOMMITTED" + ReadCommitted -> "READ COMMITTED" + RepeatableRead -> "REPEATABLE READ" + Serializable -> "SERIALIZABLE" diff --git a/persistent/Database/Persist/SqlBackend/Internal/MkSqlBackend.hs b/persistent/Database/Persist/SqlBackend/Internal/MkSqlBackend.hs new file mode 100644 index 000000000..ca1dc3a87 --- 
/dev/null +++ b/persistent/Database/Persist/SqlBackend/Internal/MkSqlBackend.hs @@ -0,0 +1,79 @@ +{-# LANGUAGE RankNTypes #-} + +module Database.Persist.SqlBackend.Internal.MkSqlBackend where + +import Control.Monad.Logger (Loc, LogLevel, LogSource, LogStr) +import Data.IORef +import Data.Map (Map) +import Data.Text (Text) +import Database.Persist.SqlBackend.Internal.Statement +import Database.Persist.SqlBackend.Internal.InsertSqlResult +import Database.Persist.SqlBackend.Internal.IsolationLevel +import Database.Persist.Types.Base +import Database.Persist.Names + +-- | This type shares many of the same field names as the 'SqlBackend' type. +-- It's useful for library authors to use this when migrating from using the +-- 'SqlBackend' constructor directly to the 'mkSqlBackend' function. +-- +-- This type will only contain required fields for constructing a 'SqlBackend'. +-- For fields that aren't present on this record, you'll want to use the various +-- @set@ functions or +-- +-- @since 2.13.0.0 +data MkSqlBackendArgs = MkSqlBackendArgs + { connPrepare :: Text -> IO Statement + -- ^ This function should prepare a 'Statement' in the target database, + -- which should allow for efficient query reuse. + , connInsertSql :: EntityDef -> [PersistValue] -> InsertSqlResult + -- ^ This function generates the SQL and values necessary for + -- performing an insert against the database. + , connStmtMap :: IORef (Map Text Statement) + -- ^ A reference to the cache of statements. 'Statement's are keyed by + -- the 'Text' queries that generated them. + , connClose :: IO () + -- ^ Close the underlying connection. + , connMigrateSql + :: [EntityDef] + -> (Text -> IO Statement) + -> EntityDef + -> IO (Either [Text] [(Bool, Text)]) + -- ^ This function returns the migrations required to include the + -- 'EntityDef' parameter in the @['EntityDef']@ database. This might + -- include creating a new table if the entity is not present, or + -- altering an existing table if it is. 
+ , connBegin :: (Text -> IO Statement) -> Maybe IsolationLevel -> IO () + -- ^ A function to begin a transaction for the underlying database. + , connCommit :: (Text -> IO Statement) -> IO () + -- ^ A function to commit a transaction to the underlying database. + , connRollback :: (Text -> IO Statement) -> IO () + -- ^ A function to roll back a transaction on the underlying database. + , connEscapeFieldName :: FieldNameDB -> Text + -- ^ A function to extract and escape the name of the column corresponding + -- to the provided field. + -- + -- @since 2.12.0.0 + , connEscapeTableName :: EntityDef -> Text + -- ^ A function to extract and escape the name of the table corresponding + -- to the provided entity. PostgreSQL uses this to support schemas. + -- + -- @since 2.12.0.0 + , connEscapeRawName :: Text -> Text + -- ^ A function to escape raw DB identifiers. MySQL uses backticks, while + -- PostgreSQL uses quotes, and so on. + -- + -- @since 2.12.0.0 + , connNoLimit :: Text + , connRDBMS :: Text + -- ^ A tag displaying what database the 'SqlBackend' is for. Can be + -- used to differentiate features in downstream libraries for different + -- database backends. + , connLimitOffset :: (Int,Int) -> Text -> Text + -- ^ Attach a 'LIMIT/OFFSET' clause to a SQL query. Note that + -- LIMIT/OFFSET is problematic for performance, and indexed range + -- queries are the superior way to offer pagination. + , connLogFunc :: LogFunc + -- ^ A log function for the 'SqlBackend' to use. 
+ } + +type LogFunc = Loc -> LogSource -> LogLevel -> LogStr -> IO () diff --git a/persistent/Database/Persist/SqlBackend/Internal/Statement.hs b/persistent/Database/Persist/SqlBackend/Internal/Statement.hs new file mode 100644 index 000000000..ef69a644c --- /dev/null +++ b/persistent/Database/Persist/SqlBackend/Internal/Statement.hs @@ -0,0 +1,19 @@ +{-# language RankNTypes #-} + +module Database.Persist.SqlBackend.Internal.Statement where + +import Data.Acquire +import Database.Persist.Types.Base +import Data.Int +import Conduit + +-- | A 'Statement' is a representation of a database query that has been +-- prepared and stored on the server side. +data Statement = Statement + { stmtFinalize :: IO () + , stmtReset :: IO () + , stmtExecute :: [PersistValue] -> IO Int64 + , stmtQuery :: forall m. MonadIO m + => [PersistValue] + -> Acquire (ConduitM () [PersistValue] m ()) + } diff --git a/persistent/Database/Persist/TH.hs b/persistent/Database/Persist/TH.hs index 0a8cfa44c..e1df4d94f 100644 --- a/persistent/Database/Persist/TH.hs +++ b/persistent/Database/Persist/TH.hs @@ -1,21 +1,23 @@ +{-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE CPP #-} +{-# LANGUAGE DataKinds #-} {-# LANGUAGE DeriveLift #-} {-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE PolyKinds #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE TupleSections #-} +{-# LANGUAGE TypeApplications #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE ViewPatterns #-} --- {-# OPTIONS_GHC -fno-warn-orphans -fno-warn-missing-fields #-} - -- | This module provides the tools for defining your database schema and using -- it to generate Haskell data types and migrations. 
module Database.Persist.TH @@ -27,6 +29,7 @@ module Database.Persist.TH , persistManyFileWith -- * Turn @EntityDef@s into types , mkPersist + , mkPersistWith , MkPersistSettings , mpsBackend , mpsGeneric @@ -39,8 +42,13 @@ module Database.Persist.TH , EntityJSON(..) , mkPersistSettings , sqlSettings + -- ** Implicit ID Columns + , ImplicitIdDef + , setImplicitIdDef -- * Various other TH functions , mkMigrate + , migrateModels + , discoverEntities , mkSave , mkDeleteCascade , mkEntityDefList @@ -63,6 +71,8 @@ module Database.Persist.TH import Prelude hiding (concat, exp, splitAt, take, (++)) +import GHC.Stack (HasCallStack) +import Data.Coerce import Control.Monad import Data.Aeson ( FromJSON(parseJSON) @@ -83,12 +93,13 @@ import Data.Int (Int64) import Data.Ix (Ix) import Data.List (foldl') import qualified Data.List as List +import Data.List.NonEmpty (NonEmpty(..)) import qualified Data.List.NonEmpty as NEL import qualified Data.Map as M import Data.Maybe (fromMaybe, isJust, listToMaybe, mapMaybe) import Data.Monoid (mappend, mconcat, (<>)) import Data.Proxy (Proxy(Proxy)) -import Data.Text (Text, append, concat, cons, pack, stripSuffix, uncons, unpack) +import Data.Text (Text, concat, cons, pack, stripSuffix, uncons, unpack) import qualified Data.Text as T import Data.Text.Encoding (decodeUtf8) import qualified Data.Text.Encoding as TE @@ -96,8 +107,9 @@ import Data.Typeable (Typeable) import GHC.Generics (Generic) import GHC.TypeLits import Instances.TH.Lift () - -- Bring `Lift (Map k v)` instance into scope, as well as `Lift Text` + -- Bring `Lift (Map k v)` instance into scope, as well as `Lift Text` -- instance on pre-1.2.4 versions of `text` +import Data.Foldable (toList) import qualified Data.Set as Set import Language.Haskell.TH.Lib (appT, conE, conK, conT, litT, strTyLit, varE, varP, varT) @@ -108,14 +120,26 @@ import Web.PathPieces (PathPiece(..)) import Database.Persist import Database.Persist.Quasi +import Database.Persist.Quasi.Internal import 
Database.Persist.Sql (Migration, PersistFieldSql, SqlBackend, migrate, sqlType) +import Database.Persist.EntityDef.Internal (EntityDef(..), EntityIdDef(..)) +import Database.Persist.ImplicitIdDef (autoIncrementingInteger) +import Database.Persist.ImplicitIdDef.Internal + -- | Converts a quasi-quoted syntax into a list of entity definitions, to be -- used as input to the template haskell generation code (mkPersist). persistWith :: PersistSettings -> QuasiQuoter persistWith ps = QuasiQuoter - { quoteExp = parseReferences ps . pack + { quoteExp = + parseReferences ps . pack + , quotePat = + error "persistWith can't be used as pattern" + , quoteType = + error "persistWith can't be used as type" + , quoteDec = + error "persistWith can't be used as declaration" } -- | Apply 'persistWith' to 'upperCaseSettings'. @@ -192,143 +216,484 @@ getFileContents = fmap decodeUtf8 . BS.readFile -- fix the cross-references between them at runtime to create a 'Migration'. -- -- @since 2.7.2 -embedEntityDefs :: [EntityDef] -> [EntityDef] -embedEntityDefs = snd . embedEntityDefsMap - -embedEntityDefsMap :: [EntityDef] -> (M.Map EntityNameHS EmbedEntityDef, [EntityDef]) -embedEntityDefsMap rawEnts = (embedEntityMap, noCycleEnts) +embedEntityDefs + :: [EntityDef] + -- ^ A list of 'EntityDef' that have been defined in a previous 'mkPersist' + -- call. + -- + -- @since 2.13.0.0 + -> [UnboundEntityDef] + -> [UnboundEntityDef] +embedEntityDefs eds = snd . embedEntityDefsMap eds + +embedEntityDefsMap + :: [EntityDef] + -- ^ A list of 'EntityDef' that have been defined in a previous 'mkPersist' + -- call. 
+ -- + -- @since 2.13.0.0 + -> [UnboundEntityDef] + -> (EmbedEntityMap, [UnboundEntityDef]) +embedEntityDefsMap existingEnts rawEnts = + (embedEntityMap, noCycleEnts) where - noCycleEnts = map breakEntDefCycle entsWithEmbeds + noCycleEnts = entsWithEmbeds -- every EntityDef could reference each-other (as an EmbedRef) -- let Haskell tie the knot embedEntityMap = constructEmbedEntityMap entsWithEmbeds - entsWithEmbeds = map setEmbedEntity rawEnts - setEmbedEntity ent = ent - { entityFields = map (setEmbedField (entityHaskell ent) embedEntityMap) $ entityFields ent - } - --- self references are already broken --- look at every emFieldEmbed to see if it refers to an already seen EntityNameHS --- so start with entityHaskell ent and accumulate embeddedHaskell em -breakEntDefCycle :: EntityDef -> EntityDef -breakEntDefCycle entDef = - entDef { entityFields = breakCycleField entName <$> entityFields entDef } - where - entName = - entityHaskell entDef - - breakCycleField entName f = case f of - FieldDef { fieldReference = EmbedRef em } -> - f { fieldReference = EmbedRef $ breakCycleEmbed [entName] em } - _ -> - f - - breakCycleEmbed ancestors em = - em { embeddedFields = breakCycleEmField (emName : ancestors) <$> embeddedFields em - } - where - emName = embeddedHaskell em - - breakCycleEmField ancestors emf = case embeddedHaskell <$> membed of - Nothing -> emf - Just embName -> if embName `elem` ancestors - then emf { emFieldEmbed = Nothing, emFieldCycle = Just embName } - else emf { emFieldEmbed = breakCycleEmbed ancestors <$> membed } - where - membed = emFieldEmbed emf - --- calls parse to Quasi.parse individual entities in isolation + entsWithEmbeds = fmap setEmbedEntity (rawEnts <> map unbindEntityDef existingEnts) + setEmbedEntity ubEnt = + let + ent = unboundEntityDef ubEnt + in + ubEnt + { unboundEntityDef = + overEntityFields + (fmap (setEmbedField (entityHaskell ent) embedEntityMap)) + ent + } + + +-- | Calls 'parse' to Quasi.parse individual entities in isolation 
-- afterwards, sets references to other entities --- | @since 2.5.3 +-- +-- In 2.13.0.0, this was changed to splice in @['UnboundEntityDef']@ +-- instead of @['EntityDef']@. +-- +-- @since 2.5.3 parseReferences :: PersistSettings -> Text -> Q Exp -parseReferences ps s = lift $ - map (mkEntityDefSqlTypeExp embedEntityMap entityMap) noCycleEnts +parseReferences ps s = lift $ parse ps s + +preprocessUnboundDefs + :: [EntityDef] + -> [UnboundEntityDef] + -> (M.Map EntityNameHS (), [UnboundEntityDef]) +preprocessUnboundDefs preexistingEntities unboundDefs = + (embedEntityMap, noCycleEnts) where - (embedEntityMap, noCycleEnts) = embedEntityDefsMap $ parse ps s - entityMap = constructEntityMap noCycleEnts + (embedEntityMap, noCycleEnts) = + embedEntityDefsMap preexistingEntities unboundDefs stripId :: FieldType -> Maybe Text stripId (FTTypeCon Nothing t) = stripSuffix "Id" t stripId _ = Nothing -foreignReference :: FieldDef -> Maybe EntityNameHS -foreignReference field = case fieldReference field of - ForeignRef ref _ -> Just ref - _ -> Nothing - --- fieldSqlType at parse time can be an Exp --- This helps delay setting fieldSqlType until lift time -data EntityDefSqlTypeExp - = EntityDefSqlTypeExp EntityDef SqlTypeExp [SqlTypeExp] - deriving Show - -data SqlTypeExp - = SqlTypeExp FieldType - | SqlType' SqlType - deriving Show +liftAndFixKeys + :: MkPersistSettings + -> M.Map EntityNameHS a + -> EntityMap + -> UnboundEntityDef + -> Q Exp +liftAndFixKeys mps emEntities entityMap unboundEnt = + let + ent = + unboundEntityDef unboundEnt + fields = + getUnboundFieldDefs unboundEnt + in + [| + ent + { entityFields = + $(ListE <$> traverse combinedFixFieldDef fields) + , entityId = + $(fixPrimarySpec mps unboundEnt) + , entityForeigns = + $(fixUnboundForeignDefs (unboundForeignDefs unboundEnt)) + } + |] + where + fixUnboundForeignDefs + :: [UnboundForeignDef] + -> Q Exp + fixUnboundForeignDefs fdefs = + fmap ListE $ forM fdefs fixUnboundForeignDef + where + fixUnboundForeignDef 
UnboundForeignDef{..} = + [| + unboundForeignDef + { foreignFields = + $(lift fixForeignFields) + , foreignNullable = + $(lift fixForeignNullable) + , foreignRefTableDBName = + $(lift fixForeignRefTableDBName) + } + |] + where + fixForeignRefTableDBName = + entityDB (unboundEntityDef parentDef) + foreignFieldNames = + case unboundForeignFields of + FieldListImpliedId ffns -> + ffns + FieldListHasReferences references -> + fmap ffrSourceField references + parentDef = + case M.lookup parentTableName entityMap of + Nothing -> + error $ mconcat + [ "Foreign table not defined: " + , show parentTableName + ] + Just a -> + a + parentTableName = + foreignRefTableHaskell unboundForeignDef + fixForeignFields :: [(ForeignFieldDef, ForeignFieldDef)] + fixForeignFields = + case unboundForeignFields of + FieldListImpliedId ffns -> + mkReferences $ toList ffns + FieldListHasReferences references -> + toList $ fmap convReferences references + where + -- in this case, we're up against the implied ID of the parent + -- dodgy assumption: columns are listed in the right order. we + -- can't check this any more clearly right now. + mkReferences fieldNames + | length fieldNames /= length parentKeyFieldNames = + error $ mconcat + [ "Foreign reference needs to have the same number " + , "of fields as the target table." + , "\n Table : " + , show (getUnboundEntityNameHS unboundEnt) + , "\n Foreign Table: " + , show parentTableName + , "\n Fields : " + , show fieldNames + , "\n Parent fields: " + , show (fmap fst parentKeyFieldNames) + , "\n\nYou can use the References keyword to fix this." 
+ ] + | otherwise = + zip (fmap (withDbName fieldStore) fieldNames) parentKeyFieldNames + where + parentKeyFieldNames + :: [(FieldNameHS, FieldNameDB)] + parentKeyFieldNames = + case unboundPrimarySpec parentDef of + NaturalKey ucd -> + fmap (withDbName parentFieldStore) (unboundCompositeCols ucd) + SurrogateKey uid -> + [(FieldNameHS "Id", unboundIdDBName uid)] + DefaultKey dbName -> + [(FieldNameHS "Id", dbName)] + withDbName store fieldNameHS = + ( fieldNameHS + , findDBName store fieldNameHS + ) + convReferences + :: ForeignFieldReference + -> (ForeignFieldDef, ForeignFieldDef) + convReferences ForeignFieldReference {..} = + ( withDbName fieldStore ffrSourceField + , withDbName parentFieldStore ffrTargetField + ) + fixForeignNullable = + all ((NotNullable /=) . isFieldNullable) foreignFieldNames + where + isFieldNullable fieldNameHS = + case getFieldDef fieldNameHS fieldStore of + Nothing -> + error "Field name not present in map" + Just a -> + nullable (unboundFieldAttrs a) + + fieldStore = + mkFieldStore unboundEnt + parentFieldStore = + mkFieldStore parentDef + findDBName store fieldNameHS = + case getFieldDBName fieldNameHS store of + Nothing -> + error $ mconcat + [ "findDBName: failed to fix dbname for: " + , show fieldNameHS + ] + Just a-> + a + + combinedFixFieldDef :: UnboundFieldDef -> Q Exp + combinedFixFieldDef ufd@UnboundFieldDef{..} = + [| + FieldDef + { fieldHaskell = + unboundFieldNameHS + , fieldDB = + unboundFieldNameDB + , fieldType = + unboundFieldType + , fieldSqlType = + $(sqlTyp') + , fieldAttrs = + unboundFieldAttrs + , fieldStrict = + unboundFieldStrict + , fieldReference = + $(fieldRef') + , fieldCascade = + unboundFieldCascade + , fieldComments = + unboundFieldComments + , fieldGenerated = + unboundFieldGenerated + , fieldIsImplicitIdColumn = + False + } + |] + where + sqlTypeExp = + getSqlType emEntities entityMap ufd + FieldDef _x _ _ _ _ _ _ _ _ _ _ = + error "need to update this record wildcard match" + (fieldRef', sqlTyp') = + 
case extractForeignRef entityMap ufd of + Just targetTable -> + (lift (ForeignRef targetTable), liftSqlTypeExp (SqlTypeReference targetTable)) + Nothing -> + (lift NoReference, liftSqlTypeExp sqlTypeExp) + +data FieldStore + = FieldStore + { fieldStoreMap :: M.Map FieldNameHS UnboundFieldDef + , fieldStoreId :: Maybe FieldNameDB + , fieldStoreEntity :: UnboundEntityDef + } -instance Lift SqlTypeExp where - lift (SqlType' t) = lift t - lift (SqlTypeExp ftype) = return st - where - typ = ftToType ftype - mtyp = ConT ''Proxy `AppT` typ - typedNothing = SigE (ConE 'Proxy) mtyp - st = VarE 'sqlType `AppE` typedNothing -#if MIN_VERSION_template_haskell(2,16,0) - liftTyped = unsafeTExpCoerce . lift -#endif +mkFieldStore :: UnboundEntityDef -> FieldStore +mkFieldStore ued = + FieldStore + { fieldStoreEntity = ued + , fieldStoreMap = + M.fromList + $ fmap (\ufd -> + ( unboundFieldNameHS ufd + , ufd + ) + ) + $ getUnboundFieldDefs + $ ued + , fieldStoreId = + case unboundPrimarySpec ued of + NaturalKey _ -> + Nothing + SurrogateKey fd -> + Just $ unboundIdDBName fd + DefaultKey n -> + Just n + } -data FieldsSqlTypeExp = FieldsSqlTypeExp [FieldDef] [SqlTypeExp] +getFieldDBName :: FieldNameHS -> FieldStore -> Maybe FieldNameDB +getFieldDBName name fs + | FieldNameHS "Id" == name = + fieldStoreId fs + | otherwise = + unboundFieldNameDB <$> getFieldDef name fs + +getFieldDef :: FieldNameHS -> FieldStore -> Maybe UnboundFieldDef +getFieldDef fieldNameHS fs = + M.lookup fieldNameHS (fieldStoreMap fs) + +extractForeignRef :: EntityMap -> UnboundFieldDef -> Maybe EntityNameHS +extractForeignRef entityMap fieldDef = do + refName <- guessFieldReference fieldDef + ent <- M.lookup refName entityMap + pure $ entityHaskell $ unboundEntityDef ent + +guessFieldReference :: UnboundFieldDef -> Maybe EntityNameHS +guessFieldReference = guessReference . 
unboundFieldType + +guessReference :: FieldType -> Maybe EntityNameHS +guessReference ft = + case ft of + FTTypeCon Nothing (T.stripSuffix "Id" -> Just tableName) -> + Just (EntityNameHS tableName) + FTApp (FTTypeCon Nothing "Key") (FTTypeCon Nothing tableName) -> + Just (EntityNameHS tableName) + _ -> + Nothing -instance Lift FieldsSqlTypeExp where - lift (FieldsSqlTypeExp fields sqlTypeExps) = - lift $ zipWith FieldSqlTypeExp fields sqlTypeExps -#if MIN_VERSION_template_haskell(2,16,0) - liftTyped = unsafeTExpCoerce . lift -#endif +mkDefaultKey + :: MkPersistSettings + -> FieldNameDB + -> EntityNameHS + -> FieldDef +mkDefaultKey mps pk unboundHaskellName = + let + iid = + mpsImplicitIdDef mps + in + maybe id addFieldAttr (FieldAttrDefault <$> iidDefault iid) $ + maybe id addFieldAttr (FieldAttrMaxlen <$> iidMaxLen iid) $ + mkAutoIdField' pk unboundHaskellName (iidFieldSqlType iid) + +fixPrimarySpec + :: MkPersistSettings + -> UnboundEntityDef + -> Q Exp +fixPrimarySpec mps unboundEnt= do + case unboundPrimarySpec unboundEnt of + DefaultKey pk -> + lift $ EntityIdField $ + mkDefaultKey mps pk unboundHaskellName + SurrogateKey uid -> do + let + entNameHS = + getUnboundEntityNameHS unboundEnt + fieldTyp = + fromMaybe (mkKeyConType entNameHS) (unboundIdType uid) + [| + EntityIdField + FieldDef + { fieldHaskell = + FieldNameHS "Id" + , fieldDB = + $(lift $ getSqlNameOr (unboundIdDBName uid) (unboundIdAttrs uid)) + , fieldType = + $(lift fieldTyp) + , fieldSqlType = + $( liftSqlTypeExp (SqlTypeExp fieldTyp) ) + , fieldStrict = + False + , fieldReference = + ForeignRef entNameHS + , fieldAttrs = + unboundIdAttrs uid + , fieldComments = + Nothing + , fieldCascade = unboundIdCascade uid + , fieldGenerated = Nothing + , fieldIsImplicitIdColumn = True + } -data FieldSqlTypeExp = FieldSqlTypeExp FieldDef SqlTypeExp + |] + NaturalKey ucd -> + [| EntityIdNaturalKey $(bindCompositeDef unboundEnt ucd) |] + where + unboundHaskellName = + getUnboundEntityNameHS unboundEnt + 
+bindCompositeDef :: UnboundEntityDef -> UnboundCompositeDef -> Q Exp +bindCompositeDef ued ucd = do + fieldDefs <- + fmap ListE $ forM (unboundCompositeCols ucd) $ \col -> + mkLookupEntityField ued col + [| + CompositeDef + { compositeFields = + NEL.fromList $(pure fieldDefs) + , compositeAttrs = + $(lift $ unboundCompositeAttrs ucd) + } + |] -instance Lift FieldSqlTypeExp where - lift (FieldSqlTypeExp FieldDef{..} sqlTypeExp) = - [|FieldDef fieldHaskell fieldDB fieldType $(lift sqlTypeExp) fieldAttrs fieldStrict fieldReference fieldCascade fieldComments fieldGenerated|] - where - FieldDef _x _ _ _ _ _ _ _ _ _ = - error "need to update this record wildcard match" -#if MIN_VERSION_template_haskell(2,16,0) - liftTyped = unsafeTExpCoerce . lift -#endif +getSqlType :: M.Map EntityNameHS a -> EntityMap -> UnboundFieldDef -> SqlTypeExp +getSqlType emEntities entityMap field = + maybe + (defaultSqlTypeExp emEntities entityMap field) + (SqlType' . SqlOther) + (listToMaybe $ mapMaybe attrSqlType $ unboundFieldAttrs field) + +-- In the case of embedding, there won't be any datatype created yet. +-- We just use SqlString, as the data will be serialized to JSON. 
+defaultSqlTypeExp :: M.Map EntityNameHS a -> EntityMap -> UnboundFieldDef -> SqlTypeExp +defaultSqlTypeExp emEntities entityMap field = + case mEmbedded emEntities ftype of + Right _ -> + SqlType' SqlString + Left (Just (FTKeyCon ty)) -> + SqlTypeExp (FTTypeCon Nothing ty) + Left Nothing -> + case extractForeignRef entityMap field of + Just refName -> + case M.lookup refName entityMap of + Nothing -> + -- error $ mconcat + -- [ "Failed to find model: " + -- , show refName + -- , " in entity list: \n" + -- ] + -- <> (unlines $ map show $ M.keys $ entityMap) + -- going to assume that it's fine, will reify it out + -- right later anyway) + SqlTypeExp ftype + -- A ForeignRef is blindly set to an Int64 in setEmbedField + -- correct that now + Just _ -> + SqlTypeReference refName + _ -> + case ftype of + -- In the case of lists, we always serialize to a string + -- value (via JSON). + -- + -- Normally, this would be determined automatically by + -- SqlTypeExp. However, there's one corner case: if there's + -- a list of entity IDs, the datatype for the ID has not + -- yet been created, so the compiler will fail. This extra + -- clause works around this limitation. + FTList _ -> + SqlType' SqlString + _ -> + SqlTypeExp ftype + where + ftype = unboundFieldType field -instance Lift EntityDefSqlTypeExp where - lift (EntityDefSqlTypeExp ent sqlTypeExp sqlTypeExps) = - [|ent { entityFields = $(lift $ FieldsSqlTypeExp (entityFields ent) sqlTypeExps) - , entityId = $(lift $ FieldSqlTypeExp (entityId ent) sqlTypeExp) - } - |] -#if MIN_VERSION_template_haskell(2,16,0) - liftTyped = unsafeTExpCoerce . 
lift -#endif +attrSqlType :: FieldAttr -> Maybe Text +attrSqlType = \case + FieldAttrSqltype x -> Just x + _ -> Nothing -type EmbedEntityMap = M.Map EntityNameHS EmbedEntityDef +data SqlTypeExp + = SqlTypeExp FieldType + | SqlType' SqlType + | SqlTypeReference EntityNameHS + deriving Show -constructEmbedEntityMap :: [EntityDef] -> EmbedEntityMap +liftSqlTypeExp :: SqlTypeExp -> Q Exp +liftSqlTypeExp ste = + case ste of + SqlType' t -> + lift t + SqlTypeExp ftype -> do + let + typ = ftToType ftype + mtyp = ConT ''Proxy `AppT` typ + typedNothing = SigE (ConE 'Proxy) mtyp + pure $ VarE 'sqlType `AppE` typedNothing + SqlTypeReference entNameHs -> do + let + entNameId :: Name + entNameId = + mkName $ T.unpack (unEntityNameHS entNameHs) <> "Id" + + [| sqlType (Proxy :: Proxy $(conT entNameId)) |] + + +type EmbedEntityMap = M.Map EntityNameHS () + +constructEmbedEntityMap :: [UnboundEntityDef] -> EmbedEntityMap constructEmbedEntityMap = - M.fromList . fmap (\ent -> (entityHaskell ent, toEmbedEntityDef ent)) + M.fromList . fmap + (\ent -> + ( entityHaskell (unboundEntityDef ent) + -- , toEmbedEntityDef (unboundEntityDef ent) + , () + ) + ) -lookupEmbedEntity :: EmbedEntityMap -> FieldDef -> Maybe EntityNameHS +lookupEmbedEntity :: M.Map EntityNameHS a -> FieldDef -> Maybe EntityNameHS lookupEmbedEntity allEntities field = do entName <- EntityNameHS <$> stripId (fieldType field) - guard (M.member entName allEntities) -- check entity name exists in embed map + guard (M.member entName allEntities) -- check entity name exists in embed fmap pure entName -type EntityMap = M.Map EntityNameHS EntityDef +type EntityMap = M.Map EntityNameHS UnboundEntityDef -constructEntityMap :: [EntityDef] -> EntityMap +constructEntityMap :: [UnboundEntityDef] -> EntityMap constructEntityMap = - M.fromList . fmap (\ent -> (entityHaskell ent, ent)) + M.fromList . 
fmap (\ent -> (entityHaskell (unboundEntityDef ent), ent)) -data FTTypeConDescr = FTKeyCon +data FTTypeConDescr = FTKeyCon Text deriving Show -- | Recurses through the 'FieldType'. Returns a 'Right' with the @@ -342,114 +707,84 @@ data FTTypeConDescr = FTKeyCon -- If the 'FieldType' has a module qualified value, then it returns @'Left' -- 'Nothing'@. mEmbedded - :: EmbedEntityMap + :: M.Map EntityNameHS a -> FieldType - -> Either (Maybe FTTypeConDescr) EmbedEntityDef + -> Either (Maybe FTTypeConDescr) EntityNameHS mEmbedded _ (FTTypeCon Just{} _) = Left Nothing mEmbedded ents (FTTypeCon Nothing (EntityNameHS -> name)) = - maybe (Left Nothing) Right $ M.lookup name ents + maybe (Left Nothing) (\_ -> Right name) $ M.lookup name ents mEmbedded ents (FTTypePromoted (EntityNameHS -> name)) = Left Nothing mEmbedded ents (FTList x) = mEmbedded ents x -mEmbedded ents (FTApp x y) = - -- Key converts an Record to a RecordId - -- special casing this is obviously a hack - -- This problem may not be solvable with the current QuasiQuoted approach though - if x == FTTypeCon Nothing "Key" - then Left $ Just FTKeyCon - else mEmbedded ents y - -setEmbedField :: EntityNameHS -> EmbedEntityMap -> FieldDef -> FieldDef +mEmbedded _ (FTApp (FTTypeCon Nothing "Key") (FTTypeCon _ a)) = + Left $ Just $ FTKeyCon $ a <> "Id" +mEmbedded _ (FTApp _ _) = + Left Nothing + +setEmbedField :: EntityNameHS -> M.Map EntityNameHS a -> FieldDef -> FieldDef setEmbedField entName allEntities field = case fieldReference field of - NoReference -> setFieldReference ref field - _ -> field + NoReference -> + setFieldReference ref field + _ -> + field where ref = case mEmbedded allEntities (fieldType field) of Left _ -> fromMaybe NoReference $ do - entName <- lookupEmbedEntity allEntities field - -- This can get corrected in mkEntityDefSqlTypeExp - let placeholderIdType = FTTypeCon (Just "Data.Int") "Int64" - pure $ ForeignRef entName placeholderIdType + refEntName <- lookupEmbedEntity allEntities field + pure 
$ ForeignRef refEntName Right em -> - if embeddedHaskell em /= entName + if em /= entName then EmbedRef em - else if maybeNullable field + else if maybeNullable (unbindFieldDef field) then SelfReference else case fieldType field of FTList _ -> SelfReference - _ -> error $ unpack $ unEntityNameHS entName <> ": a self reference must be a Maybe" + _ -> error $ unpack $ unEntityNameHS entName <> ": a self reference must be a Maybe or List" setFieldReference :: ReferenceDef -> FieldDef -> FieldDef setFieldReference ref field = field { fieldReference = ref } -mkEntityDefSqlTypeExp :: EmbedEntityMap -> EntityMap -> EntityDef -> EntityDefSqlTypeExp -mkEntityDefSqlTypeExp emEntities entityMap ent = - EntityDefSqlTypeExp ent (getSqlType $ entityId ent) (map getSqlType $ entityFields ent) - where - getSqlType field = - maybe - (defaultSqlTypeExp field) - (SqlType' . SqlOther) - (listToMaybe $ mapMaybe (\case {FieldAttrSqltype x -> Just x; _ -> Nothing}) $ fieldAttrs field) - - -- In the case of embedding, there won't be any datatype created yet. - -- We just use SqlString, as the data will be serialized to JSON. - defaultSqlTypeExp field = - case mEmbedded emEntities ftype of - Right _ -> - SqlType' SqlString - Left (Just FTKeyCon) -> - SqlType' SqlString - Left Nothing -> - case fieldReference field of - ForeignRef refName ft -> - case M.lookup refName entityMap of - Nothing -> SqlTypeExp ft - -- A ForeignRef is blindly set to an Int64 in setEmbedField - -- correct that now - Just ent' -> - case entityPrimary ent' of - Nothing -> SqlTypeExp ft - Just pdef -> - case compositeFields pdef of - [] -> error "mkEntityDefSqlTypeExp: no composite fields" - [x] -> SqlTypeExp $ fieldType x - _ -> SqlType' $ SqlOther "Composite Reference" - CompositeRef _ -> - SqlType' $ SqlOther "Composite Reference" - _ -> - case ftype of - -- In the case of lists, we always serialize to a string - -- value (via JSON). - -- - -- Normally, this would be determined automatically by - -- SqlTypeExp. 
However, there's one corner case: if there's - a list of entity IDs, the datatype for the ID has not - yet been created, so the compiler will fail. This extra - clause works around this limitation. - FTList _ -> SqlType' SqlString - _ -> SqlTypeExp ftype - where - ftype = fieldType field - -- | Create data types and appropriate 'PersistEntity' instances for the given -- 'EntityDef's. Works well with the persist quasi-quoter. -mkPersist :: MkPersistSettings -> [EntityDef] -> Q [Dec] -mkPersist mps ents' = do +mkPersist + :: MkPersistSettings + -> [UnboundEntityDef] + -> Q [Dec] +mkPersist mps = mkPersistWith mps [] + +-- | Like 'mkPersist', but also accepts a list of 'EntityDef's that were defined in a previous 'mkPersist' call, so that the entities generated here may refer to them. +-- +-- @since 2.13.0.0 +mkPersistWith + :: MkPersistSettings + -> [EntityDef] + -> [UnboundEntityDef] + -> Q [Dec] +mkPersistWith mps preexistingEntities ents' = do + let + (embedEntityMap, predefs) = + preprocessUnboundDefs preexistingEntities ents' + allEnts = + embedEntityDefs preexistingEntities + $ fmap (setDefaultIdFields mps) + $ predefs + entityMap = + constructEntityMap allEnts + ents <- filterM shouldGenerateCode allEnts requireExtensions [ [TypeFamilies], [GADTs, ExistentialQuantification] , [DerivingStrategies], [GeneralizedNewtypeDeriving], [StandaloneDeriving] , [UndecidableInstances], [DataKinds], [FlexibleInstances] ] persistFieldDecs <- fmap mconcat $ mapM (persistFieldFromEntity mps) ents - entityDecs <- fmap mconcat $ mapM (mkEntity entityMap mps) ents + entityDecs <- fmap mconcat $ mapM (mkEntity embedEntityMap entityMap mps) ents jsonDecs <- fmap mconcat $ mapM (mkJSON mps) ents uniqueKeyInstances <- fmap mconcat $ mapM (mkUniqueKeyInstances mps) ents - symbolToFieldInstances <- fmap mconcat $ mapM (mkSymbolToFieldInstances mps) ents + symbolToFieldInstances <- fmap mconcat $ mapM (mkSymbolToFieldInstances mps entityMap) ents return $ mconcat [ persistFieldDecs , entityDecs @@ -457,27 +792,89 @@ mkPersist mps ents' = do , uniqueKeyInstances , symbolToFieldInstances ] + +-- we can't just use 'isInstance' 
because TH throws an error +shouldGenerateCode :: UnboundEntityDef -> Q Bool +shouldGenerateCode ed = do + mtyp <- lookupTypeName entityName + case mtyp of + Nothing -> do + pure True + Just typeName -> do + instanceExists <- isInstance ''PersistEntity [ConT typeName] + pure (not instanceExists) where - ents = map fixEntityDef ents' - entityMap = constructEntityMap ents + entityName = + T.unpack . unEntityNameHS . getEntityHaskellName . unboundEntityDef $ ed + +overEntityDef :: (EntityDef -> EntityDef) -> UnboundEntityDef -> UnboundEntityDef +overEntityDef f ued = ued { unboundEntityDef = f (unboundEntityDef ued) } + +setDefaultIdFields :: MkPersistSettings -> UnboundEntityDef -> UnboundEntityDef +setDefaultIdFields mps ued + | defaultIdType ued = + overEntityDef + (setEntityIdDef (setToMpsDefault (mpsImplicitIdDef mps) (getEntityId ed))) + ued + | otherwise = + ued + where + ed = + unboundEntityDef ued + setToMpsDefault :: ImplicitIdDef -> EntityIdDef -> EntityIdDef + setToMpsDefault iid (EntityIdField fd) = + EntityIdField fd + { fieldType = + iidFieldType iid (getEntityHaskellName ed) + , fieldSqlType = + iidFieldSqlType iid + , fieldAttrs = + let + def = + toList (FieldAttrDefault <$> iidDefault iid) + maxlen = + toList (FieldAttrMaxlen <$> iidMaxLen iid) + in + def <> maxlen <> fieldAttrs fd + , fieldIsImplicitIdColumn = + True + } + setToMpsDefault _ x = + x -- | Implement special preprocessing on EntityDef as necessary for 'mkPersist'. -- For example, strip out any fields marked as MigrationOnly. -fixEntityDef :: EntityDef -> EntityDef -fixEntityDef ed = - ed { entityFields = filter keepField $ entityFields ed } - where - keepField fd = FieldAttrMigrationOnly `notElem` fieldAttrs fd && - FieldAttrSafeToRemove `notElem` fieldAttrs fd +-- +-- This should be called when performing Haskell codegen, but the 'EntityDef' +-- *should* keep all of the fields present when defining 'entityDef'. 
This is +-- necessary so that migrations know to keep these columns around, or to delete +-- them, as appropriate. +fixEntityDef :: UnboundEntityDef -> UnboundEntityDef +fixEntityDef ued = + ued + { unboundEntityFields = + filter isHaskellUnboundField (unboundEntityFields ued) + } -- | Settings to be passed to the 'mkPersist' function. data MkPersistSettings = MkPersistSettings { mpsBackend :: Type - -- ^ Which database backend we\'re using. + -- ^ Which database backend we\'re using. This type is used for the + -- 'PersistEntityBackend' associated type in the entities that are + -- generated. + -- + -- If the 'mpsGeneric' value is set to 'True', then this type is used for + -- the non-Generic type alias. The data and type will be named: + -- + -- @ + -- data ModelGeneric backend = Model { ... } + -- @ -- - -- When generating data types, each type is given a generic version- which - -- works with any backend- and a type synonym for the commonly used - -- backend. This is where you specify that commonly used backend. + -- And, for convenience's sake, we provide a type alias: + -- + -- @ + -- type Model = ModelGeneric $(the type you give here) + -- @ , mpsGeneric :: Bool -- ^ Create generic types that can be used with multiple backends. Good for -- reusable code, but makes error messages harder to understand. Default: @@ -485,47 +882,73 @@ data MkPersistSettings = MkPersistSettings , mpsPrefixFields :: Bool -- ^ Prefix field names with the model name. Default: True. -- - -- Note: this field is deprecated. Use the mpsFieldLabelModifier and mpsConstraintLabelModifier instead. + -- Note: this field is deprecated. Use the mpsFieldLabelModifier and + -- 'mpsConstraintLabelModifier' instead. , mpsFieldLabelModifier :: Text -> Text -> Text - -- ^ Customise the field accessors and lens names using the entity and field name. - -- Both arguments are upper cased. + -- ^ Customise the field accessors and lens names using the entity and field + -- name. 
Both arguments are upper cased. -- -- Default: appends entity and field. -- -- Note: this setting is ignored if mpsPrefixFields is set to False. + -- -- @since 2.11.0.0 , mpsConstraintLabelModifier :: Text -> Text -> Text - -- ^ Customise the Constraint names using the entity and field name. The result - -- should be a valid haskell type (start with an upper cased letter). + -- ^ Customise the Constraint names using the entity and field name. The + -- result should be a valid haskell type (start with an upper cased letter). -- -- Default: appends entity and field -- -- Note: this setting is ignored if mpsPrefixFields is set to False. + -- -- @since 2.11.0.0 , mpsEntityJSON :: Maybe EntityJSON -- ^ Generate @ToJSON@/@FromJSON@ instances for each model types. If it's -- @Nothing@, no instances will be generated. Default: -- -- @ - -- Just EntityJSON - -- { entityToJSON = 'entityIdToJSON - -- , entityFromJSON = 'entityIdFromJSON + -- Just 'EntityJSON' + -- { 'entityToJSON' = 'entityIdToJSON + -- , 'entityFromJSON' = 'entityIdFromJSON -- } -- @ - , mpsGenerateLenses :: !Bool - -- ^ Instead of generating normal field accessors, generator lens-style accessors. + , mpsGenerateLenses :: Bool + -- ^ Instead of generating normal field accessors, generator lens-style + -- accessors. -- -- Default: False -- -- @since 1.3.1 - , mpsDeriveInstances :: ![Name] - -- ^ Automatically derive these typeclass instances for all record and key types. + , mpsDeriveInstances :: [Name] + -- ^ Automatically derive these typeclass instances for all record and key + -- types. -- -- Default: [] -- -- @since 2.8.1 + , mpsImplicitIdDef :: ImplicitIdDef + -- ^ TODO: document + -- + -- @since 2.13.0.0 } +{-# DEPRECATED mpsGeneric "The mpsGeneric function adds a considerable amount of overhead and complexity to the library without bringing significant benefit. We would like to remove it. 
If you require this feature, please comment on the linked GitHub issue, and we'll either keep it around, or we can figure out a nicer way to solve your problem.\n\n Github: https://github.com/yesodweb/persistent/issues/1204" #-} + +-- | Set the 'ImplicitIdDef' in the given 'MkPersistSettings'. The default +-- value is 'autoIncrementingInteger'. +-- +-- @since 2.13.0.0 +setImplicitIdDef :: ImplicitIdDef -> MkPersistSettings -> MkPersistSettings +setImplicitIdDef iid mps = + mps { mpsImplicitIdDef = iid } + +getImplicitIdType :: MkPersistSettings -> Type +getImplicitIdType = do + idDef <- mpsImplicitIdDef + isGeneric <- mpsGeneric + backendTy <- mpsBackend + pure $ iidType idDef isGeneric backendTy + data EntityJSON = EntityJSON { entityToJSON :: Name -- ^ Name of the @toJSON@ implementation for @Entity a@. @@ -549,6 +972,8 @@ mkPersistSettings backend = MkPersistSettings } , mpsGenerateLenses = False , mpsDeriveInstances = [] + , mpsImplicitIdDef = + autoIncrementingInteger } -- | Use the 'SqlPersist' backend. 
@@ -567,17 +992,19 @@ upperFirst t = Just (a, b) -> cons (toUpper a) b Nothing -> t -dataTypeDec :: MkPersistSettings -> EntityDef -> Q Dec -dataTypeDec mps entDef = do - let names = mkEntityDefDeriveNames mps entDef +dataTypeDec :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> Q Dec +dataTypeDec mps entityMap entDef = do + let + names = + mkEntityDefDeriveNames mps entDef - let (stocks, anyclasses) = partitionEithers (map stratFor names) + let (stocks, anyclasses) = partitionEithers (fmap stratFor names) let stockDerives = do guard (not (null stocks)) - pure (DerivClause (Just StockStrategy) (map ConT stocks)) + pure (DerivClause (Just StockStrategy) (fmap ConT stocks)) anyclassDerives = do guard (not (null anyclasses)) - pure (DerivClause (Just AnyclassStrategy) (map ConT anyclasses)) + pure (DerivClause (Just AnyclassStrategy) (fmap ConT anyclasses)) unless (null anyclassDerives) $ do requireExtensions [[DeriveAnyClass]] pure $ DataD [] nameFinal paramsFinal @@ -592,7 +1019,7 @@ dataTypeDec mps entDef = do Right n stockClasses = - Set.fromList (map mkName + Set.fromList (fmap mkName [ "Eq", "Ord", "Show", "Read", "Bounded", "Enum", "Ix", "Generic", "Data", "Typeable" ] <> [''Eq, ''Ord, ''Show, ''Read, ''Bounded, ''Enum, ''Ix, ''Generic, ''Data, ''Typeable ] @@ -604,55 +1031,56 @@ dataTypeDec mps entDef = do cols :: [VarBangType] cols = do - fieldDef <- entityFields entDef + fieldDef <- getUnboundFieldDefs entDef let recordName = fieldDefToRecordName mps entDef fieldDef - strictness = if fieldStrict fieldDef then isStrict else notStrict - fieldIdType = maybeIdType mps fieldDef Nothing Nothing - in pure (recordName, strictness, fieldIdType) + strictness = if unboundFieldStrict fieldDef then isStrict else notStrict + fieldIdType = maybeIdType mps entityMap fieldDef Nothing Nothing + pure (recordName, strictness, fieldIdType) constrs - | entitySum entDef = map sumCon $ entityFields entDef + | unboundEntitySum entDef = fmap sumCon $ getUnboundFieldDefs entDef 
| otherwise = [RecC (mkEntityDefName entDef) cols] sumCon fieldDef = NormalC (sumConstrName mps entDef fieldDef) - [(notStrict, maybeIdType mps fieldDef Nothing Nothing)] + [(notStrict, maybeIdType mps entityMap fieldDef Nothing Nothing)] -uniqueTypeDec :: MkPersistSettings -> EntityDef -> Dec -uniqueTypeDec mps entDef = +uniqueTypeDec :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> Dec +uniqueTypeDec mps entityMap entDef = + DataInstD + [] #if MIN_VERSION_template_haskell(2,15,0) - DataInstD [] Nothing - (AppT (ConT ''Unique) (genericDataType mps (entityHaskell entDef) backendT)) - Nothing - (map (mkUnique mps entDef) $ entityUniques entDef) - [] + Nothing + (AppT (ConT ''Unique) (genericDataType mps (getUnboundEntityNameHS entDef) backendT)) #else - DataInstD [] ''Unique - [genericDataType mps (entityHaskell entDef) backendT] - Nothing - (map (mkUnique mps entDef) $ entityUniques entDef) - [] + ''Unique + [genericDataType mps (getUnboundEntityNameHS entDef) backendT] #endif + Nothing + (fmap (mkUnique mps entityMap entDef) $ entityUniques (unboundEntityDef entDef)) + [] -mkUnique :: MkPersistSettings -> EntityDef -> UniqueDef -> Con -mkUnique mps entDef (UniqueDef constr _ fields attrs) = - NormalC (mkConstraintName constr) types +mkUnique :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> UniqueDef -> Con +mkUnique mps entityMap entDef (UniqueDef constr _ fields attrs) = + NormalC (mkConstraintName constr) $ toList types where types = - map (go . flip lookup3 (entityFields entDef) . unFieldNameHS . fst) fields + fmap (go . flip lookup3 (getUnboundFieldDefs entDef) . unFieldNameHS . 
fst) fields force = "!force" `elem` attrs - go :: (FieldDef, IsNullable) -> (Strict, Type) + go :: (UnboundFieldDef, IsNullable) -> (Strict, Type) go (_, Nullable _) | not force = error nullErrMsg - go (fd, y) = (notStrict, maybeIdType mps fd Nothing (Just y)) + go (fd, y) = (notStrict, maybeIdType mps entityMap fd Nothing (Just y)) - lookup3 :: Text -> [FieldDef] -> (FieldDef, IsNullable) + lookup3 :: Text -> [UnboundFieldDef] -> (UnboundFieldDef, IsNullable) lookup3 s [] = error $ unpack $ "Column not found: " ++ s ++ " in unique " ++ unConstraintNameHS constr - lookup3 x (fd@FieldDef {..}:rest) - | x == unFieldNameHS fieldHaskell = (fd, nullable fieldAttrs) - | otherwise = lookup3 x rest + lookup3 x (fd:rest) + | x == unFieldNameHS (unboundFieldNameHS fd) = + (fd, nullable $ unboundFieldAttrs fd) + | otherwise = + lookup3 x rest nullErrMsg = mconcat [ "Error: By default we disallow NULLables in an uniqueness " @@ -666,16 +1094,25 @@ mkUnique mps entDef (UniqueDef constr _ fields attrs) = maybeIdType :: MkPersistSettings - -> FieldDef + -> EntityMap + -> UnboundFieldDef -> Maybe Name -- ^ backend -> Maybe IsNullable -> Type -maybeIdType mps fieldDef mbackend mnull = maybeTyp mayNullable idtyp +maybeIdType mps entityMap fieldDef mbackend mnull = + maybeTyp mayNullable idType where - mayNullable = case mnull of - (Just (Nullable ByMaybeAttr)) -> True - _ -> maybeNullable fieldDef - idtyp = idType mps fieldDef mbackend + mayNullable = + case mnull of + Just (Nullable ByMaybeAttr) -> + True + _ -> + maybeNullable fieldDef + idType = fromMaybe (ftToType $ unboundFieldType fieldDef) $ do + typ <- extractForeignRef entityMap fieldDef + pure $ + ConT ''Key + `AppT` genericDataType mps typ (VarT $ fromMaybe backendName mbackend) backendDataType :: MkPersistSettings -> Type backendDataType mps @@ -691,14 +1128,6 @@ genericDataType mps name backend | mpsGeneric mps = ConT (mkEntityNameHSGenericName name) `AppT` backend | otherwise = ConT $ mkEntityNameHSName name -idType :: 
MkPersistSettings -> FieldDef -> Maybe Name -> Type -idType mps fieldDef mbackend = - case foreignReference fieldDef of - Just typ -> - ConT ''Key - `AppT` genericDataType mps typ (VarT $ fromMaybe backendName mbackend) - Nothing -> ftToType $ fieldType fieldDef - degen :: [Clause] -> [Clause] degen [] = let err = VarE 'error `AppE` LitE (StringL @@ -706,8 +1135,26 @@ degen [] = in [normalClause [WildP] err] degen x = x -mkToPersistFields :: MkPersistSettings -> EntityDef -> Q Dec -mkToPersistFields mps ed@EntityDef { entitySum = isSum, entityFields = fields } = do +-- needs: +-- +-- * isEntitySum ed +-- * field accesor +-- * getEntityFields ed +-- * used in goSum, or sumConstrName +-- * mkEntityDefName ed +-- * uses entityHaskell +-- * sumConstrName ed fieldDef +-- * only needs entity name and field name +-- +-- data MkToPersistFields = MkToPersistFields +-- { isEntitySum :: Bool +-- , entityHaskell :: HaskellNameHS +-- , entityFieldNames :: [FieldNameHS] +-- } +mkToPersistFields :: MkPersistSettings -> UnboundEntityDef -> Q Dec +mkToPersistFields mps ed = do + let isSum = unboundEntitySum ed + fields = getUnboundFieldDefs ed clauses <- if isSum then sequence $ zipWith goSum fields [1..] @@ -718,14 +1165,14 @@ mkToPersistFields mps ed@EntityDef { entitySum = isSum, entityFields = fields } go = do xs <- sequence $ replicate fieldCount $ newName "x" let name = mkEntityDefName ed - pat = ConP name $ map VarP xs + pat = ConP name $ fmap VarP xs sp <- [|SomePersistField|] - let bod = ListE $ map (AppE sp . VarE) xs + let bod = ListE $ fmap (AppE sp . 
VarE) xs return $ normalClause [pat] bod - fieldCount = length fields + fieldCount = length (getUnboundFieldDefs ed) - goSum :: FieldDef -> Int -> Q Clause + goSum :: UnboundFieldDef -> Int -> Q Clause goSum fieldDef idx = do let name = sumConstrName mps ed fieldDef enull <- [|SomePersistField PersistNull|] @@ -762,9 +1209,9 @@ mkUniqueToValues pairs = do go :: UniqueDef -> Q Clause go (UniqueDef constr _ names _) = do xs <- mapM (const $ newName "x") names - let pat = ConP (mkConstraintName constr) $ map VarP xs + let pat = ConP (mkConstraintName constr) $ fmap VarP $ toList xs tpv <- [|toPersistValue|] - let bod = ListE $ map (AppE tpv . VarE) xs + let bod = ListE $ fmap (AppE tpv . VarE) $ toList xs return $ normalClause [pat] bod isNotNull :: PersistValue -> Bool @@ -775,26 +1222,37 @@ mapLeft :: (a -> c) -> Either a b -> Either c b mapLeft _ (Right r) = Right r mapLeft f (Left l) = Left (f l) -mkFromPersistValues :: MkPersistSettings -> EntityDef -> Q [Clause] -mkFromPersistValues _ entDef@(EntityDef { entitySum = False }) = - fromValues entDef "fromPersistValues" entE $ entityFields entDef - where - entE = entityDefConE entDef - -mkFromPersistValues mps entDef@(EntityDef { entitySum = True }) = do - nothing <- [|Left ("Invalid fromPersistValues input: sum type with all nulls. Entity: " `mappend` entName)|] - clauses <- mkClauses [] $ entityFields entDef - return $ clauses `mappend` [normalClause [WildP] nothing] +-- needs: +-- +-- * getEntityFields +-- * sumConstrName on field +-- * fromValues +-- * entityHaskell +-- * sumConstrName +-- * entityDefConE +-- +-- +mkFromPersistValues :: MkPersistSettings -> UnboundEntityDef -> Q [Clause] +mkFromPersistValues mps entDef + | unboundEntitySum entDef = do + nothing <- [|Left ("Invalid fromPersistValues input: sum type with all nulls. 
Entity: " `mappend` entName)|] + clauses <- mkClauses [] $ getUnboundFieldDefs entDef + return $ clauses `mappend` [normalClause [WildP] nothing] + | otherwise = + fromValues entDef "fromPersistValues" entE + $ fmap unboundFieldNameHS + $ filter isHaskellUnboundField + $ getUnboundFieldDefs entDef where - entName = unEntityNameHS $ entityHaskell entDef + entName = unEntityNameHS $ getUnboundEntityNameHS entDef mkClauses _ [] = return [] mkClauses before (field:after) = do x <- newName "x" let null' = ConP 'PersistNull [] pat = ListP $ mconcat - [ map (const null') before + [ fmap (const null') before , [VarP x] - , map (const null') after + , fmap (const null') after ] constr = ConE $ sumConstrName mps entDef field fs <- [|fromPersistValue $(return $ VarE x)|] @@ -802,6 +1260,8 @@ mkFromPersistValues mps entDef@(EntityDef { entitySum = True }) = do let clause = Clause [pat] (GuardedB [(guard', InfixE (Just constr) fmapE (Just fs))]) [] clauses <- mkClauses (field : before) after return $ clause : clauses + entE = entityDefConE entDef + type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t @@ -811,7 +1271,10 @@ lensPTH sa sbt afb s = fmap (sbt s) (afb $ sa s) fmapE :: Exp fmapE = VarE 'fmap -mkLensClauses :: MkPersistSettings -> EntityDef -> Q [Clause] +unboundEntitySum :: UnboundEntityDef -> Bool +unboundEntitySum = entitySum . 
unboundEntityDef + +mkLensClauses :: MkPersistSettings -> UnboundEntityDef -> Q [Clause] mkLensClauses mps entDef = do lens' <- [|lensPTH|] getId <- [|entityKey|] @@ -824,9 +1287,9 @@ mkLensClauses mps entDef = do let idClause = normalClause [ConP (keyIdName entDef) []] (lens' `AppE` getId `AppE` setId) - if entitySum entDef - then return $ idClause : map (toSumClause lens' keyVar valName xName) (entityFields entDef) - else return $ idClause : map (toClause lens' getVal dot keyVar valName xName) (entityFields entDef) + return $ idClause : if unboundEntitySum entDef + then fmap (toSumClause lens' keyVar valName xName) (getUnboundFieldDefs entDef) + else fmap (toClause lens' getVal dot keyVar valName xName) (getUnboundFieldDefs entDef) where toClause lens' getVal dot keyVar valName xName fieldDef = normalClause [ConP (filterConName mps entDef fieldDef) []] @@ -854,7 +1317,7 @@ mkLensClauses mps entDef = do -- FIXME It would be nice if the types expressed that the Field is -- a sum type and therefore could result in Maybe. - : if length (entityFields entDef) > 1 then [emptyMatch] else [] + : if length (getUnboundFieldDefs entDef) > 1 then [emptyMatch] else [] setter = LamE [ ConP 'Entity [VarP keyVar, WildP] , VarP xName @@ -863,7 +1326,7 @@ mkLensClauses mps entDef = do -- | declare the key type and associated instances -- @'PathPiece'@, @'ToHttpApiData'@ and @'FromHttpApiData'@ instances are only generated for a Key with one field -mkKeyTypeDec :: MkPersistSettings -> EntityDef -> Q (Dec, [Dec]) +mkKeyTypeDec :: MkPersistSettings -> UnboundEntityDef -> Q (Dec, [Dec]) mkKeyTypeDec mps entDef = do (instDecs, i) <- if mpsGeneric mps @@ -889,7 +1352,7 @@ mkKeyTypeDec mps entDef = do -- This is much better for debugging/logging purposes -- cf. 
https://github.com/yesodweb/persistent/issues/1104 let alwaysStockStrategyTypeclasses = [''Show, ''Read] - deriveClauses = map (\typeclass -> + deriveClauses = fmap (\typeclass -> if (not useNewtype || typeclass `elem` alwaysStockStrategyTypeclasses) then DerivClause (Just StockStrategy) [(ConT typeclass)] else DerivClause (Just NewtypeStrategy) [(ConT typeclass)] @@ -910,7 +1373,8 @@ mkKeyTypeDec mps entDef = do unKeyE = unKeyExp entDef dec = RecC (keyConName entDef) (keyFields mps entDef) k = ''Key - recordType = genericDataType mps (entityHaskell entDef) backendT + recordType = + genericDataType mps (getUnboundEntityNameHS entDef) backendT pfInstD = -- FIXME: generate a PersistMap instead of PersistList [d|instance PersistField (Key $(pure recordType)) where toPersistValue = PersistList . keyToValues @@ -935,9 +1399,8 @@ mkKeyTypeDec mps entDef = do |] genericNewtypeInstances = do - requirePersistentExtensions + requirePersistentExtensions - instances <- do alwaysInstances <- -- See the "Always use StockStrategy" comment above, on why Show/Read use "stock" here [d|deriving stock instance Show (BackendKey $(pure backendT)) => Show (Key $(pure recordType)) @@ -953,68 +1416,118 @@ mkKeyTypeDec mps entDef = do deriving newtype instance FromJSON (BackendKey $(pure backendT)) => FromJSON (Key $(pure recordType)) |] - if customKeyType then return alwaysInstances - else fmap (alwaysInstances `mappend`) backendKeyGenericI - return instances + mappend alwaysInstances <$> + if customKeyType + then pure [] + else backendKeyGenericI useNewtype = pkNewtype mps entDef - customKeyType = not (defaultIdType entDef) || not useNewtype || isJust (entityPrimary entDef) + customKeyType = + or + [ not (defaultIdType entDef) + , not useNewtype + , isJust (entityPrimary (unboundEntityDef entDef)) + , not isBackendKey + ] + + isBackendKey = + case getImplicitIdType mps of + ConT bk `AppT` _ + | bk == ''BackendKey -> + True + _ -> + False supplement :: [Name] -> [Name] supplement names = 
names <> (filter (`notElem` names) $ mpsDeriveInstances mps) --- | Returns 'True' if the key definition has more than 1 field. +-- | Returns 'True' if the key definition has less than 2 fields. -- -- @since 2.11.0.0 -pkNewtype :: MkPersistSettings -> EntityDef -> Bool +pkNewtype :: MkPersistSettings -> UnboundEntityDef -> Bool pkNewtype mps entDef = length (keyFields mps entDef) < 2 -defaultIdType :: EntityDef -> Bool -defaultIdType entDef = fieldType (entityId entDef) == FTTypeCon Nothing (keyIdText entDef) - -keyFields :: MkPersistSettings -> EntityDef -> [(Name, Strict, Type)] -keyFields mps entDef = case entityPrimary entDef of - Just pdef -> map primaryKeyVar (compositeFields pdef) - Nothing -> if defaultIdType entDef - then [idKeyVar backendKeyType] - else [idKeyVar $ ftToType $ fieldType $ entityId entDef] +-- | Kind of a nasty hack. Checks to see if the 'fieldType' matches what the +-- QuasiQuoter produces for an implicit ID. +defaultIdType :: UnboundEntityDef -> Bool +defaultIdType entDef = + case unboundPrimarySpec entDef of + DefaultKey _ -> + True + _ -> + False + +keyFields :: MkPersistSettings -> UnboundEntityDef -> [(Name, Strict, Type)] +keyFields mps entDef = + case unboundPrimarySpec entDef of + NaturalKey ucd -> + fmap naturalKeyVar (unboundCompositeCols ucd) + DefaultKey _ -> + pure . idKeyVar $ getImplicitIdType mps + SurrogateKey k -> + pure . 
idKeyVar $ case unboundIdType k of + Nothing -> + getImplicitIdType mps + Just ty -> + ftToType ty where - backendKeyType - | mpsGeneric mps = ConT ''BackendKey `AppT` backendT - | otherwise = ConT ''BackendKey `AppT` mpsBackend mps - idKeyVar ft = (unKeyName entDef, notStrict, ft) - primaryKeyVar fieldDef = ( keyFieldName mps entDef fieldDef - , notStrict - , ftToType $ fieldType fieldDef - ) - -mkKeyToValues :: MkPersistSettings -> EntityDef -> Q Dec + unboundFieldDefs = + getUnboundFieldDefs entDef + naturalKeyVar fieldName = + case findField fieldName unboundFieldDefs of + Nothing -> + error "column not defined on entity" + Just unboundFieldDef -> + ( keyFieldName mps entDef (unboundFieldNameHS unboundFieldDef) + , notStrict + , ftToType $ unboundFieldType unboundFieldDef + ) + + idKeyVar ft = + ( unKeyName entDef + , notStrict + , ft + ) + +findField :: FieldNameHS -> [UnboundFieldDef] -> Maybe UnboundFieldDef +findField fieldName = + List.find ((fieldName ==) . unboundFieldNameHS) + +mkKeyToValues :: MkPersistSettings -> UnboundEntityDef -> Q Dec mkKeyToValues mps entDef = do - (p, e) <- case entityPrimary entDef of - Nothing -> - ([],) <$> [|(:[]) . toPersistValue . $(return $ unKeyExp entDef)|] - Just pdef -> - return $ toValuesPrimary pdef - return $ FunD 'keyToValues $ return $ normalClause p e + recordName <- newName "record" + FunD 'keyToValues . pure <$> + case unboundPrimarySpec entDef of + NaturalKey ucd -> do + normalClause [VarP recordName] <$> + toValuesPrimary recordName ucd + _ -> do + normalClause [] <$> + [|(:[]) . toPersistValue . 
$(pure $ unKeyExp entDef)|] where - toValuesPrimary pdef = - ( [VarP recordName] - , ListE $ map (\fieldDef -> VarE 'toPersistValue `AppE` (VarE (keyFieldName mps entDef fieldDef) `AppE` VarE recordName)) $ compositeFields pdef - ) - recordName = mkName "record" + toValuesPrimary recName ucd = + ListE <$> mapM (f recName) (unboundCompositeCols ucd) + f recName fieldNameHS = + [| + toPersistValue ($(varE $ keyFieldName mps entDef fieldNameHS) $(varE recName)) + |] normalClause :: [Pat] -> Exp -> Clause normalClause p e = Clause p (NormalB e) [] -mkKeyFromValues :: MkPersistSettings -> EntityDef -> Q Dec -mkKeyFromValues _mps entDef = do - clauses <- case entityPrimary entDef of - Nothing -> do - e <- [|fmap $(return keyConE) . fromPersistValue . headNote|] - return [normalClause [] e] - Just pdef -> - fromValues entDef "keyFromValues" keyConE (compositeFields pdef) - return $ FunD 'keyFromValues clauses +-- needs: +-- +-- * entityPrimary +-- * keyConExp entDef +mkKeyFromValues :: MkPersistSettings -> UnboundEntityDef -> Q Dec +mkKeyFromValues _mps entDef = + FunD 'keyFromValues <$> + case unboundPrimarySpec entDef of + NaturalKey ucd -> + fromValues entDef "keyFromValues" keyConE (unboundCompositeCols ucd) + _ -> do + e <- [|fmap $(return keyConE) . fromPersistValue . 
headNote|] + return [normalClause [] e] where keyConE = keyConExp entDef @@ -1023,38 +1536,63 @@ headNote = \case [x] -> x xs -> error $ "mkKeyFromValues: expected a list of one element, got: " `mappend` show xs -fromValues :: EntityDef -> Text -> Exp -> [FieldDef] -> Q [Clause] -fromValues entDef funName conE fields = do +-- needs from entity: +-- +-- * entityText entDef +-- * entityHaskell +-- * entityDB entDef +-- +-- needs from fields: +-- +-- * mkPersistValue +-- * fieldHaskell +-- +-- data MkFromValues = MkFromValues +-- { entityHaskell :: EntityNameHS +-- , entityDB :: EntitynameDB +-- , entityFieldNames :: [FieldNameHS] +-- } +fromValues :: UnboundEntityDef -> Text -> Exp -> [FieldNameHS] -> Q [Clause] +fromValues entDef funName constructExpr fields = do x <- newName "x" - let funMsg = entityText entDef `mappend` ": " `mappend` funName `mappend` " failed on: " - patternMatchFailure <- [|Left $ mappend funMsg (pack $ show $(return $ VarE x))|] + let + funMsg = + mconcat + [ entityText entDef + , ": " + , funName + , " failed on: " + ] + patternMatchFailure <- + [|Left $ mappend funMsg (pack $ show $(return $ VarE x))|] suc <- patternSuccess return [ suc, normalClause [VarP x] patternMatchFailure ] where - tableName = unEntityNameDB (entityDB entDef) + tableName = + unEntityNameDB (entityDB (unboundEntityDef entDef)) patternSuccess = case fields of [] -> do rightE <- [|Right|] - return $ normalClause [ListP []] (rightE `AppE` conE) + return $ normalClause [ListP []] (rightE `AppE` constructExpr) _ -> do x1 <- newName "x1" restNames <- mapM (\i -> newName $ "x" `mappend` show i) [2..length fields] (fpv1:mkPersistValues) <- mapM mkPersistValue fields app1E <- [|(<$>)|] - let conApp = infixFromPersistValue app1E fpv1 conE x1 + let conApp = infixFromPersistValue app1E fpv1 constructExpr x1 applyE <- [|(<*>)|] let applyFromPersistValue = infixFromPersistValue applyE return $ normalClause - [ListP $ map VarP (x1:restNames)] + [ListP $ fmap VarP (x1:restNames)] 
(foldl' (\exp (name, fpv) -> applyFromPersistValue fpv exp name) conApp (zip restNames mkPersistValues)) infixFromPersistValue applyE fpv exp name = UInfixE exp applyE (fpv `AppE` VarE name) mkPersistValue field = - let fieldName = (unFieldNameHS (fieldHaskell field)) + let fieldName = unFieldNameHS field in [|mapLeft (fieldError tableName fieldName) . fromPersistValue|] -- | Render an error message based on the @tableName@ and @fieldName@ with @@ -1071,23 +1609,22 @@ fieldError tableName fieldName err = mconcat , err ] -mkEntity :: EntityMap -> MkPersistSettings -> EntityDef -> Q [Dec] -mkEntity entityMap mps entDef = do - fields <- mkFields mps entDef - entityDefExp <- - if mpsGeneric mps - then liftAndFixKeys entityMap entDef - else makePersistEntityDefExp mps entityMap entDef - +mkEntity :: M.Map EntityNameHS a -> EntityMap -> MkPersistSettings -> UnboundEntityDef -> Q [Dec] +mkEntity embedEntityMap entityMap mps preDef = do + entityDefExp <- liftAndFixKeys mps embedEntityMap entityMap preDef + let + entDef = + fixEntityDef preDef + fields <- mkFields mps entityMap entDef let name = mkEntityDefName entDef let clazz = ConT ''PersistEntity `AppT` genDataType tpf <- mkToPersistFields mps entDef fpv <- mkFromPersistValues mps entDef - utv <- mkUniqueToValues $ entityUniques entDef + utv <- mkUniqueToValues $ entityUniques $ unboundEntityDef entDef puk <- mkUniqueKeys entDef - fkc <- mapM (mkForeignKeysComposite mps entDef) $ entityForeigns entDef + fkc <- mapM (mkForeignKeysComposite mps entDef) $ unboundForeignDefs entDef - toFieldNames <- mkToFieldNames $ entityUniques entDef + toFieldNames <- mkToFieldNames $ entityUniques $ unboundEntityDef entDef (keyTypeDec, keyInstanceDecs) <- mkKeyTypeDec mps entDef keyToValues' <- mkKeyToValues mps entDef @@ -1101,41 +1638,49 @@ mkEntity entityMap mps entDef = do lensClauses <- mkLensClauses mps entDef - lenses <- mkLenses mps entDef + lenses <- mkLenses mps entityMap entDef let instanceConstraint = if not (mpsGeneric mps) 
then [] else [mkClassP ''PersistStore [backendT]] [keyFromRecordM'] <- - case entityPrimary entDef of - Just prim -> do + case unboundPrimarySpec entDef of + NaturalKey ucd -> do recordName <- newName "record" - let keyCon = keyConName entDef - keyFields' = fieldDefToRecordName mps entDef <$> compositeFields prim + let + keyCon = + keyConName entDef + keyFields' = + fieldNameToRecordName mps entDef <$> unboundCompositeCols ucd constr = foldl' AppE (ConE keyCon) - (map + (toList $ fmap (\n -> VarE n `AppE` VarE recordName ) keyFields' ) keyFromRec = varP 'keyFromRecordM - [d|$(keyFromRec) = Just ( \ $(varP recordName) -> $(pure constr)) |] + [d| + $(keyFromRec) = Just ( \ $(varP recordName) -> $(pure constr)) + |] - Nothing -> + _ -> [d|$(varP 'keyFromRecordM) = Nothing|] - dtd <- dataTypeDec mps entDef - let allEntDefs = entityFieldTHCon <$> efthAllFields fields - allEntDefClauses = entityFieldTHClause <$> efthAllFields fields + dtd <- dataTypeDec mps entityMap entDef + let + allEntDefs = + entityFieldTHCon <$> efthAllFields fields + allEntDefClauses = + entityFieldTHClause <$> efthAllFields fields return $ addSyn $ dtd : mconcat fkc `mappend` ( [ TySynD (keyIdName entDef) [] $ ConT ''Key `AppT` ConT name , instanceD instanceConstraint clazz - [ uniqueTypeDec mps entDef + [ uniqueTypeDec mps entityMap entDef , keyTypeDec , keyToValues' , keyFromValues' @@ -1184,8 +1729,10 @@ mkEntity entityMap mps entDef = do ] ] `mappend` lenses) `mappend` keyInstanceDecs where - genDataType = genericDataType mps entName backendT - entName = entityHaskell entDef + genDataType = + genericDataType mps entName backendT + entName = + getUnboundEntityNameHS preDef data EntityFieldsTH = EntityFieldsTH { entityFieldsTHPrimary :: EntityFieldTH @@ -1193,18 +1740,77 @@ data EntityFieldsTH = EntityFieldsTH } efthAllFields :: EntityFieldsTH -> [EntityFieldTH] -efthAllFields EntityFieldsTH{..} = entityFieldsTHPrimary : entityFieldsTHFields +efthAllFields EntityFieldsTH{..} = + stripIdFieldDef 
entityFieldsTHPrimary : entityFieldsTHFields + +stripIdFieldDef :: EntityFieldTH -> EntityFieldTH +stripIdFieldDef efth = efth + { entityFieldTHClause = + go (entityFieldTHClause efth) + } + where + go (Clause ps bdy ds) = + Clause ps bdy' ds + where + bdy' = + case bdy of + NormalB e -> + NormalB $ AppE (VarE 'stripIdFieldImpl) e + _ -> + bdy + +-- | @persistent@ used to assume that an Id was always a single field. +-- +-- This method preserves as much backwards compatibility as possible. +stripIdFieldImpl :: HasCallStack => EntityIdDef -> FieldDef +stripIdFieldImpl eid = + case eid of + EntityIdField fd -> fd + EntityIdNaturalKey cd -> + case compositeFields cd of + (x :| xs) -> + case xs of + [] -> + x + _ -> + dummyFieldDef + where + dummyFieldDef = + FieldDef + { fieldHaskell = + FieldNameHS "Id" + , fieldDB = + FieldNameDB "__composite_key_no_id__" + , fieldType = + FTTypeCon Nothing "__Composite_Key__" + , fieldSqlType = + SqlOther "Composite Key" + , fieldAttrs = + [] + , fieldStrict = + False + , fieldReference = + NoReference + , fieldCascade = + noCascade + , fieldComments = + Nothing + , fieldGenerated = + Nothing + , fieldIsImplicitIdColumn = + False + } -mkFields :: MkPersistSettings -> EntityDef -> Q EntityFieldsTH -mkFields mps entDef = +mkFields :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> Q EntityFieldsTH +mkFields mps entityMap entDef = EntityFieldsTH - <$> mkField mps entDef (entityId entDef) - <*> mapM (mkField mps entDef) (entityFields entDef) + <$> mkIdField mps entDef + <*> mapM (mkField mps entityMap entDef) (getUnboundFieldDefs entDef) -mkUniqueKeyInstances :: MkPersistSettings -> EntityDef -> Q [Dec] +mkUniqueKeyInstances :: MkPersistSettings -> UnboundEntityDef -> Q [Dec] mkUniqueKeyInstances mps entDef = do requirePersistentExtensions - case entityUniques entDef of + case entityUniques (unboundEntityDef entDef) of [] -> mappend <$> typeErrorSingle <*> typeErrorAtLeastOne [_] -> mappend <$> singleUniqueKey <*> atLeastOneKey 
(_:_) -> mappend <$> typeErrorMultiple <*> atLeastOneKey @@ -1270,15 +1876,16 @@ mkUniqueKeyInstances mps entDef = do cxt <- withPersistStoreWriteCxt pure [instanceD cxt atLeastOneUniqueKeyClass impl] - genDataType = genericDataType mps (entityHaskell entDef) backendT + genDataType = + genericDataType mps (getUnboundEntityNameHS entDef) backendT -entityText :: EntityDef -> Text -entityText = unEntityNameHS . entityHaskell +entityText :: UnboundEntityDef -> Text +entityText = unEntityNameHS . getUnboundEntityNameHS -mkLenses :: MkPersistSettings -> EntityDef -> Q [Dec] -mkLenses mps _ | not (mpsGenerateLenses mps) = return [] -mkLenses _ ent | entitySum ent = return [] -mkLenses mps ent = fmap mconcat $ forM (entityFields ent) $ \field -> do +mkLenses :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> Q [Dec] +mkLenses mps _ _ | not (mpsGenerateLenses mps) = return [] +mkLenses _ _ ent | entitySum (unboundEntityDef ent) = return [] +mkLenses mps entityMap ent = fmap mconcat $ forM (getUnboundFieldDefs ent) $ \field -> do let lensName = mkEntityLensName mps ent field fieldName = fieldDefToRecordName mps ent field needleN <- newName "needle" @@ -1297,9 +1904,12 @@ mkLenses mps ent = fmap mconcat $ forM (entityFields ent) $ \field -> do -- values backend1 = backendName backend2 = backendName - aT = maybeIdType mps field (Just backend1) Nothing - bT = maybeIdType mps field (Just backend2) Nothing - mkST backend = genericDataType mps (entityHaskell ent) (VarT backend) + aT = + maybeIdType mps entityMap field (Just backend1) Nothing + bT = + maybeIdType mps entityMap field (Just backend2) Nothing + mkST backend = + genericDataType mps (getUnboundEntityNameHS ent) (VarT backend) sT = mkST backend1 tT = mkST backend2 t1 `arrow` t2 = ArrowT `AppT` t1 `AppT` t2 @@ -1323,35 +1933,91 @@ mkLenses mps ent = fmap mconcat $ forM (entityFields ent) $ \field -> do ] ] -mkForeignKeysComposite :: MkPersistSettings -> EntityDef -> ForeignDef -> Q [Dec] -mkForeignKeysComposite mps 
entDef ForeignDef {..} = - if not foreignToPrimary then return [] else do - let fieldName = fieldNameToRecordName mps entDef - let fname = fieldName (constraintToField foreignConstraintNameHaskell) - let reftableString = unpack $ unEntityNameHS foreignRefTableHaskell - let reftableKeyName = mkName $ reftableString `mappend` "Key" - let tablename = mkEntityDefName entDef - recordName <- newName "record" - - let mkFldE ((foreignName, _),ff) = case ff of - (FieldNameHS {unFieldNameHS = "Id"}, FieldNameDB {unFieldNameDB = "id"}) - -> AppE (VarE $ mkName "toBackendKey") $ - VarE (fieldName foreignName) `AppE` VarE recordName - _ -> VarE (fieldName foreignName) `AppE` VarE recordName - let fldsE = map mkFldE foreignFields - let mkKeyE = foldl' AppE (maybeExp foreignNullable $ ConE reftableKeyName) fldsE - let fn = FunD fname [normalClause [VarP recordName] mkKeyE] +mkForeignKeysComposite + :: MkPersistSettings + -> UnboundEntityDef + -> UnboundForeignDef + -> Q [Dec] +mkForeignKeysComposite mps entDef foreignDef + | foreignToPrimary (unboundForeignDef foreignDef) = do + let + fieldName = + fieldNameToRecordName mps entDef + fname = + fieldName $ constraintToField $ foreignConstraintNameHaskell $ unboundForeignDef foreignDef + reftableString = + unpack $ unEntityNameHS $ foreignRefTableHaskell $ unboundForeignDef foreignDef + reftableKeyName = + mkName $ reftableString `mappend` "Key" + tablename = + mkEntityDefName entDef + fieldStore = + mkFieldStore entDef + + recordName <- newName "record_mkForeignKeysComposite" + + let + mkFldE foreignName = + -- using coerce here to convince SqlBackendKey to go away + VarE 'coerce `AppE` + (VarE (fieldName foreignName) `AppE` VarE recordName) + mkFldR ffr = + let + e = + mkFldE (ffrSourceField ffr) + in + case ffrTargetField ffr of + FieldNameHS "Id" -> + VarE 'toBackendKey `AppE` + e + _ -> + e + foreignFieldNames foreignFieldList = + case foreignFieldList of + FieldListImpliedId names -> + names + FieldListHasReferences refs -> + 
fmap ffrSourceField refs + + fldsE = + getForeignNames $ (unboundForeignFields foreignDef) + getForeignNames = \case + FieldListImpliedId xs -> + fmap mkFldE xs + FieldListHasReferences xs -> + fmap mkFldR xs + + nullErr n = + error $ "Could not find field definition for: " <> show n + fNullable = + setNull + $ fmap (\n -> fromMaybe (nullErr n) $ getFieldDef n fieldStore) + $ foreignFieldNames + $ unboundForeignFields foreignDef + mkKeyE = + foldl' AppE (maybeExp fNullable $ ConE reftableKeyName) fldsE + fn = + FunD fname [normalClause [VarP recordName] mkKeyE] + + keyTargetTable = + maybeTyp fNullable $ ConT ''Key `AppT` ConT (mkName reftableString) + + sigTy <- [t| $(conT tablename) -> $(pure keyTargetTable) |] + pure + [ SigD fname sigTy + , fn + ] - let t2 = maybeTyp foreignNullable $ ConT ''Key `AppT` ConT (mkName reftableString) - let sig = SigD fname $ (ArrowT `AppT` (ConT tablename)) `AppT` t2 - return [sig, fn] + | otherwise = + pure [] + where + constraintToField = FieldNameHS . unConstraintNameHS - where - constraintToField = FieldNameHS . unConstraintNameHS maybeExp :: Bool -> Exp -> Exp maybeExp may exp | may = fmapE `AppE` exp | otherwise = exp + maybeTyp :: Bool -> Type -> Type maybeTyp may typ | may = ConT ''Maybe `AppT` typ | otherwise = typ @@ -1359,8 +2025,8 @@ maybeTyp may typ | may = ConT ''Maybe `AppT` typ entityToPersistValueHelper :: (PersistEntity record) => record -> PersistValue entityToPersistValueHelper entity = PersistMap $ zip columnNames fieldsAsPersistValues where - columnNames = map (unFieldNameHS . fieldHaskell) (entityFields (entityDef (Just entity))) - fieldsAsPersistValues = map toPersistValue $ toPersistFields entity + columnNames = fmap (unFieldNameHS . 
fieldHaskell) (getEntityFields (entityDef (Just entity))) + fieldsAsPersistValues = fmap toPersistValue $ toPersistFields entity entityFromPersistValueHelper :: (PersistEntity record) @@ -1375,7 +2041,7 @@ entityFromPersistValueHelper columnNames pv = do lookupPersistValueByColumnName columnName = fromMaybe PersistNull (HM.lookup (pack columnName) columnMap) - fromPersistValues $ map lookupPersistValueByColumnName columnNames + fromPersistValues $ fmap lookupPersistValueByColumnName columnNames -- | Produce code similar to the following: -- @@ -1385,7 +2051,7 @@ entityFromPersistValueHelper columnNames pv = do -- fromPersistValue = entityFromPersistValueHelper ["col1", "col2"] -- sqlType _ = SqlString -- @ -persistFieldFromEntity :: MkPersistSettings -> EntityDef -> Q [Dec] +persistFieldFromEntity :: MkPersistSettings -> UnboundEntityDef -> Q [Dec] persistFieldFromEntity mps entDef = do sqlStringConstructor' <- [|SqlString|] toPersistValueImplementation <- [|entityToPersistValueHelper|] @@ -1402,19 +2068,25 @@ persistFieldFromEntity mps entDef = do ] ] where - typ = genericDataType mps (entityHaskell entDef) backendT - entFields = entityFields entDef - columnNames = map (unpack . unFieldNameHS . fieldHaskell) entFields + typ = + genericDataType mps (entityHaskell (unboundEntityDef entDef)) backendT + entFields = + filter isHaskellUnboundField $ getUnboundFieldDefs entDef + columnNames = + fmap (unpack . unFieldNameHS . unboundFieldNameHS) entFields -- | Apply the given list of functions to the same @EntityDef@s. -- -- This function is useful for cases such as: -- -- >>> share [mkSave "myDefs", mkPersist sqlSettings] [persistLowerCase|...|] -share :: [[EntityDef] -> Q [Dec]] -> [EntityDef] -> Q [Dec] +share :: [[a] -> Q [Dec]] -> [a] -> Q [Dec] share fs x = mconcat <$> mapM ($ x) fs -- | Save the @EntityDef@s passed in under the given name. +-- +-- This function was deprecated in @persistent-2.13.0.0@. It doesn't properly +-- fix foreign keys. 
Please refer to 'mkEntityDefList' for a replacement. mkSave :: String -> [EntityDef] -> Q [Dec] mkSave name' defs' = do let name = mkName name' @@ -1423,6 +2095,8 @@ mkSave name' defs' = do , FunD name [normalClause [] defs] ] +{-# DEPRECATED mkSave "This function is broken. mkEntityDefList is a drop-in replacement that will properly handle foreign keys correctly." #-} + data Dep = Dep { depTarget :: EntityNameHS , depSourceTable :: EntityNameHS @@ -1430,29 +2104,36 @@ data Dep = Dep , depSourceNull :: IsNullable } +{-# DEPRECATED mkDeleteCascade "You can now set update and delete cascade behavior directly on the entity in the quasiquoter. This function and class are deprecated and will be removed in the next major version." #-} + -- | Generate a 'DeleteCascade' instance for the given @EntityDef@s. -mkDeleteCascade :: MkPersistSettings -> [EntityDef] -> Q [Dec] +-- +-- This function is deprecated as of 2.13.0.0. You can now set cascade +-- behavior directly in the quasiquoter. +mkDeleteCascade :: MkPersistSettings -> [UnboundEntityDef] -> Q [Dec] mkDeleteCascade mps defs = do let deps = concatMap getDeps defs mapM (go deps) defs where - getDeps :: EntityDef -> [Dep] + getDeps :: UnboundEntityDef -> [Dep] getDeps def = - concatMap getDeps' $ entityFields $ fixEntityDef def + concatMap getDeps' $ getUnboundFieldDefs $ fixEntityDef def where - getDeps' :: FieldDef -> [Dep] - getDeps' field@FieldDef {..} = - case foreignReference field of + getDeps' :: UnboundFieldDef -> [Dep] + getDeps' field = + case guessFieldReference field of Just name -> - return Dep + return Dep { depTarget = name - , depSourceTable = entityHaskell def - , depSourceField = fieldHaskell - , depSourceNull = nullable fieldAttrs + , depSourceTable = entityHaskell (unboundEntityDef def) + , depSourceField = unboundFieldNameHS field + , depSourceNull = nullable (unboundFieldAttrs field) } - Nothing -> [] - go :: [Dep] -> EntityDef -> Q Dec - go allDeps EntityDef{entityHaskell = name} = do + Nothing ->
+ [] + go :: [Dep] -> UnboundEntityDef -> Q Dec + go allDeps ued = do + let name = entityHaskell (unboundEntityDef ued) let deps = filter (\x -> depTarget x == name) allDeps key <- newName "key" let del = VarE 'delete @@ -1475,7 +2156,7 @@ mkDeleteCascade mps defs = do val _ = VarE key let stmts :: [Stmt] - stmts = map mkStmt deps `mappend` + stmts = fmap mkStmt deps `mappend` [NoBindS $ del `AppE` VarE key] let entityT = genericDataType mps name backendT @@ -1505,7 +2186,7 @@ mkDeleteCascade mps defs = do mkEntityDefList :: String -- ^ The name that will be given to the 'EntityDef' list. - -> [EntityDef] + -> [UnboundEntityDef] -> Q [Dec] mkEntityDefList entityList entityDefs = do let entityListName = mkName entityList @@ -1520,27 +2201,27 @@ mkEntityDefList entityList entityDefs = do , ValD (VarP entityListName) (NormalB edefs) [] ] -mkUniqueKeys :: EntityDef -> Q Dec -mkUniqueKeys def | entitySum def = +mkUniqueKeys :: UnboundEntityDef -> Q Dec +mkUniqueKeys def | entitySum (unboundEntityDef def) = return $ FunD 'persistUniqueKeys [normalClause [WildP] (ListE [])] mkUniqueKeys def = do c <- clause return $ FunD 'persistUniqueKeys [c] where clause = do - xs <- forM (entityFields def) $ \fieldDef -> do - let x = fieldHaskell fieldDef + xs <- forM (getUnboundFieldDefs def) $ \fieldDef -> do + let x = unboundFieldNameHS fieldDef x' <- newName $ '_' : unpack (unFieldNameHS x) return (x, x') - let pcs = map (go xs) $ entityUniques def + let pcs = fmap (go xs) $ entityUniques $ unboundEntityDef def let pat = ConP (mkEntityDefName def) - (map (VarP . snd) xs) + (fmap (VarP . 
snd) xs) return $ normalClause [pat] (ListE pcs) go :: [(FieldNameHS, Name)] -> UniqueDef -> Exp go xs (UniqueDef name _ cols _) = - foldl' (go' xs) (ConE (mkConstraintName name)) (map fst cols) + foldl' (go' xs) (ConE (mkConstraintName name)) (toList $ fmap fst cols) go' :: [(FieldNameHS, Name)] -> Exp -> FieldNameHS -> Exp go' xs front col = @@ -1635,101 +2316,67 @@ derivePersistFieldJSON s = do ] ] +-- | The basic function for migrating models, no Template Haskell required. +-- +-- It's probably best to use this in concert with 'mkEntityDefList', and then +-- call 'migrateModels' with the result from that function. +-- +-- @ +-- share [mkPersist sqlSettings, mkEntityDefList "entities"] [persistLowerCase| ... |] +-- +-- migrateAll = 'migrateModels' entities +-- @ +-- +-- The function 'mkMigrate' currently implements exactly this behavior now. If +-- you're splitting up the entity definitions into separate files, then it is +-- better to use the entity definition list and then concatenate all the models +-- together into a big list to call with 'migrateModels'. +-- +-- @ +-- module Foo where +-- +-- share [mkPersist s, mkEntityDefList "fooModels"] ... +-- +-- +-- module Bar where +-- +-- share [mkPersist s, mkEntityDefList "barModels"] ... +-- +-- module Migration where +-- +-- import Foo +-- import Bar +-- +-- migrateAll = migrateModels (fooModels <> barModels) +-- @ +-- +-- @since 2.13.0.0 +migrateModels :: [EntityDef] -> Migration +migrateModels defs= + forM_ (filter isMigrated defs) $ \def -> + migrate defs def + where + isMigrated def = pack "no-migrate" `notElem` entityAttrs def -- | Creates a single function to perform all migrations for the entities -- defined here. One thing to be aware of is dependencies: if you have entities -- with foreign references, make sure to place those definitions after the -- entities they reference.
-mkMigrate :: String -> [EntityDef] -> Q [Dec] -mkMigrate fun allDefs = do - body' <- body - return - [ SigD (mkName fun) typ - , FunD (mkName fun) [normalClause [] body'] +-- +-- In @persistent-2.13.0.0@, this was changed to *ignore* the input entity def +-- list, and instead defer to 'mkEntityDefList' to get the correct entities. +-- This avoids problems where the QuasiQuoter is unable to know what the right +-- reference types are. This sets 'mkPersist' to be the "single source of truth" +-- for entity definitions. +mkMigrate :: String -> [UnboundEntityDef] -> Q [Dec] +mkMigrate fun eds = do + let entityDefListName = ("entityDefListFor" <> fun) + body <- [| migrateModels $(varE (mkName entityDefListName)) |] + edList <- mkEntityDefList entityDefListName eds + pure $ edList <> + [ SigD (mkName fun) (ConT ''Migration) + , FunD (mkName fun) [normalClause [] body] ] - where - defs = filter isMigrated allDefs - isMigrated def = "no-migrate" `notElem` entityAttrs def - typ = ConT ''Migration - entityMap = constructEntityMap allDefs - body :: Q Exp - body = - case defs of - [] -> [|return ()|] - _ -> do - defsName <- newName "defs" - defsStmt <- do - defs' <- mapM (liftAndFixKeys entityMap) defs - let defsExp = ListE defs' - return $ LetS [ValD (VarP defsName) (NormalB defsExp) []] - stmts <- mapM (toStmt $ VarE defsName) defs - return (DoE $ defsStmt : stmts) - toStmt :: Exp -> EntityDef -> Q Stmt - toStmt defsExp ed = do - u <- liftAndFixKeys entityMap ed - m <- [|migrate|] - return $ NoBindS $ m `AppE` defsExp `AppE` u - -makePersistEntityDefExp :: MkPersistSettings -> EntityMap -> EntityDef -> Q Exp -makePersistEntityDefExp mps entityMap entDef@EntityDef{..} = - [|EntityDef - entityHaskell - entityDB - $(liftAndFixKey entityMap entityId) - entityAttrs - $(fieldDefReferences mps entDef entityFields) - entityUniques - entityForeigns - entityDerives - entityExtra - entitySum - entityComments - |] - -fieldDefReferences :: MkPersistSettings -> EntityDef -> [FieldDef] -> 
Q Exp -fieldDefReferences mps entDef fieldDefs = - fmap ListE $ forM fieldDefs $ \fieldDef -> do - let fieldDefConE = ConE (filterConName mps entDef fieldDef) - pure $ VarE 'persistFieldDef `AppE` fieldDefConE - -liftAndFixKeys :: EntityMap -> EntityDef -> Q Exp -liftAndFixKeys entityMap EntityDef{..} = - [|EntityDef - entityHaskell - entityDB - $(liftAndFixKey entityMap entityId) - entityAttrs - $(ListE <$> mapM (liftAndFixKey entityMap) entityFields) - entityUniques - entityForeigns - entityDerives - entityExtra - entitySum - entityComments - |] - -liftAndFixKey :: EntityMap -> FieldDef -> Q Exp -liftAndFixKey entityMap fieldDef@(FieldDef a b c sqlTyp e f fieldRef fc mcomments fg) = - [|FieldDef a b c $(sqlTyp') e f fieldRef' fc mcomments fg|] - where - (fieldRef', sqlTyp') = - case extractForeignRef entityMap fieldDef of - Just (fr, ft) -> - (fr, lift (SqlTypeExp ft)) - Nothing -> - (fieldRef, lift sqlTyp) - -extractForeignRef :: EntityMap -> FieldDef -> Maybe (ReferenceDef, FieldType) -extractForeignRef entityMap fieldDef = - case fieldReference fieldDef of - ForeignRef refName _ft -> do - ent <- M.lookup refName entityMap - case fieldReference $ entityId ent of - fr@(ForeignRef _ ft) -> - Just (fr, ft) - _ -> - Nothing - _ -> - Nothing data EntityFieldTH = EntityFieldTH { entityFieldTHCon :: Con @@ -1742,22 +2389,77 @@ data EntityFieldTH = EntityFieldTH -- forall . typ ~ FieldType => EntFieldName -- -- EntFieldName = FieldDef .... 
-mkField :: MkPersistSettings -> EntityDef -> FieldDef -> Q EntityFieldTH -mkField mps et cd = do - let con = ForallC +-- +-- Field Def Accessors Required: +mkField :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> UnboundFieldDef -> Q EntityFieldTH +mkField mps entityMap et fieldDef = do + let + con = + ForallC [] - [mkEqualP (VarT $ mkName "typ") $ maybeIdType mps cd Nothing Nothing] + [mkEqualP (VarT $ mkName "typ") $ maybeIdType mps entityMap fieldDef Nothing Nothing] $ NormalC name [] - bod <- lift cd + bod <- mkLookupEntityField et (unboundFieldNameHS fieldDef) let cla = normalClause [ConP name []] bod return $ EntityFieldTH con cla where - name = filterConName mps et cd + name = filterConName mps et fieldDef + +mkIdField :: MkPersistSettings -> UnboundEntityDef -> Q EntityFieldTH +mkIdField mps ued = do + let + entityName = + getUnboundEntityNameHS ued + entityIdType + | mpsGeneric mps = + ConT ''Key `AppT` ( + ConT (mkEntityNameHSGenericName entityName) + `AppT` backendT + ) + | otherwise = + ConT $ mkName $ (T.unpack $ unEntityNameHS entityName) ++ "Id" + name = + filterConName' mps entityName (FieldNameHS "Id") + clause <- + fixPrimarySpec mps ued + pure EntityFieldTH + { entityFieldTHCon = + ForallC + [] + [mkEqualP (VarT $ mkName "typ") entityIdType] + $ NormalC name [] + , entityFieldTHClause = + normalClause [ConP name []] clause + } + +lookupEntityField + :: PersistEntity entity + => Proxy entity + -> FieldNameHS + -> FieldDef +lookupEntityField prxy fieldNameHS = + fromMaybe boom $ List.find ((fieldNameHS ==) . 
fieldHaskell) $ entityFields $ entityDef prxy + where + boom = + error "Database.Persist.TH.Internal.lookupEntityField: failed to find entity field with database name" -maybeNullable :: FieldDef -> Bool -maybeNullable fd = nullable (fieldAttrs fd) == Nullable ByMaybeAttr +mkLookupEntityField + :: UnboundEntityDef + -> FieldNameHS + -> Q Exp +mkLookupEntityField ued ufd = + [| + lookupEntityField + (Proxy :: Proxy $(conT entityName)) + $(lift ufd) + |] + where + entityName = mkEntityNameHSName (getUnboundEntityNameHS ued) + +maybeNullable :: UnboundFieldDef -> Bool +maybeNullable fd = nullable (unboundFieldAttrs fd) == Nullable ByMaybeAttr ftToType :: FieldType -> Type ftToType = \case @@ -1777,12 +2479,12 @@ ftToType = \case ListT `AppT` ftToType x infixr 5 ++ -(++) :: Text -> Text -> Text -(++) = append +(++) :: Monoid m => m -> m -> m +(++) = mappend -mkJSON :: MkPersistSettings -> EntityDef -> Q [Dec] -mkJSON _ def | ("json" `notElem` entityAttrs def) = return [] -mkJSON mps def = do +mkJSON :: MkPersistSettings -> UnboundEntityDef -> Q [Dec] +mkJSON _ def | ("json" `notElem` entityAttrs (unboundEntityDef def)) = return [] +mkJSON mps (fixEntityDef -> def) = do requireExtensions [[FlexibleInstances]] pureE <- [|pure|] apE' <- [|(<*>)|] @@ -1793,37 +2495,51 @@ mkJSON mps def = do objectE <- [|object|] obj <- newName "obj" mzeroE <- [|mzero|] - - xs <- mapM fieldToJSONValName (entityFields def) - - let conName = mkEntityDefName def - typ = genericDataType mps (entityHaskell def) backendT - toJSONI = typeInstanceD ''ToJSON (mpsGeneric mps) typ [toJSON'] - toJSON' = FunD 'toJSON $ return $ normalClause - [ConP conName $ map VarP xs] - (objectE `AppE` ListE pairs) - pairs = zipWith toPair (entityFields def) xs - toPair f x = InfixE - (Just (packE `AppE` LitE (StringL $ unpack $ unFieldNameHS $ fieldHaskell f))) - dotEqualE - (Just $ VarE x) - fromJSONI = typeInstanceD ''FromJSON (mpsGeneric mps) typ [parseJSON'] - parseJSON' = FunD 'parseJSON - [ normalClause [ConP 
'Object [VarP obj]] - (foldl' - (\x y -> InfixE (Just x) apE' (Just y)) - (pureE `AppE` ConE conName) - pulls - ) - , normalClause [WildP] mzeroE - ] - pulls = map toPull $ entityFields def - toPull f = InfixE - (Just $ VarE obj) - (if maybeNullable f then dotColonQE else dotColonE) - (Just $ AppE packE $ LitE $ StringL $ unpack $ unFieldNameHS $ fieldHaskell f) + let + fields = + getUnboundFieldDefs def + + xs <- mapM fieldToJSONValName fields + + let + conName = + mkEntityDefName def + typ = + genericDataType mps (entityHaskell (unboundEntityDef def)) backendT + toJSONI = + typeInstanceD ''ToJSON (mpsGeneric mps) typ [toJSON'] + where + toJSON' = FunD 'toJSON $ return $ normalClause + [ConP conName $ fmap VarP xs] + (objectE `AppE` ListE pairs) + where + pairs = zipWith toPair fields xs + toPair f x = InfixE + (Just (packE `AppE` LitE (StringL $ unpack $ unFieldNameHS $ unboundFieldNameHS f))) + dotEqualE + (Just $ VarE x) + fromJSONI = + typeInstanceD ''FromJSON (mpsGeneric mps) typ [parseJSON'] + where + parseJSON' = FunD 'parseJSON + [ normalClause [ConP 'Object [VarP obj]] + (foldl' + (\x y -> InfixE (Just x) apE' (Just y)) + (pureE `AppE` ConE conName) + pulls + ) + , normalClause [WildP] mzeroE + ] + where + pulls = + fmap toPull fields + toPull f = InfixE + (Just $ VarE obj) + (if maybeNullable f then dotColonQE else dotColonE) + (Just $ AppE packE $ LitE $ StringL $ unpack $ unFieldNameHS $ unboundFieldNameHS f) case mpsEntityJSON mps of - Nothing -> return [toJSONI, fromJSONI] + Nothing -> + return [toJSONI, fromJSONI] Just entityJSON -> do entityJSONIs <- if mpsGeneric mps then [d| @@ -1855,43 +2571,13 @@ isStrict = Bang NoSourceUnpackedness SourceStrict instanceD :: Cxt -> Type -> [Dec] -> Dec instanceD = InstanceD Nothing --- entityUpdates :: EntityDef -> [(EntityNameHS, FieldType, IsNullable, PersistUpdate)] --- entityUpdates = --- concatMap go . 
entityFields --- where --- go FieldDef {..} = map (\a -> (fieldHaskell, fieldType, nullable fieldAttrs, a)) [minBound..maxBound] - --- mkToUpdate :: String -> [(String, PersistUpdate)] -> Q Dec --- mkToUpdate name pairs = do --- pairs' <- mapM go pairs --- return $ FunD (mkName name) $ degen pairs' --- where --- go (constr, pu) = do --- pu' <- lift pu --- return $ normalClause [RecP (mkName constr) []] pu' - --- mkToFieldName :: String -> [(String, String)] -> Dec --- mkToFieldName func pairs = --- FunD (mkName func) $ degen $ map go pairs --- where --- go (constr, name) = --- normalClause [RecP (mkName constr) []] (LitE $ StringL name) - --- mkToValue :: String -> [String] -> Dec --- mkToValue func = FunD (mkName func) . degen . map go --- where --- go constr = --- let x = mkName "x" --- in normalClause [ConP (mkName constr) [VarP x]] --- (VarE 'toPersistValue `AppE` VarE x) - -- | Check that all of Persistent's required extensions are enabled, or else fail compilation -- -- This function should be called before any code that depends on one of the required extensions being enabled. 
requirePersistentExtensions :: Q () requirePersistentExtensions = requireExtensions requiredExtensions where - requiredExtensions = map pure + requiredExtensions = fmap pure [ DerivingStrategies , GeneralizedNewtypeDeriving , StandaloneDeriving @@ -1899,36 +2585,69 @@ requirePersistentExtensions = requireExtensions requiredExtensions , MultiParamTypeClasses ] -mkSymbolToFieldInstances :: MkPersistSettings -> EntityDef -> Q [Dec] -mkSymbolToFieldInstances mps ed = do - fmap join $ forM (keyAndEntityFields ed) $ \fieldDef -> do +mkSymbolToFieldInstances :: MkPersistSettings -> EntityMap -> UnboundEntityDef -> Q [Dec] +mkSymbolToFieldInstances mps entityMap (fixEntityDef -> ed) = do + let + entityHaskellName = + getEntityHaskellName $ unboundEntityDef ed + allFields = + getUnboundFieldDefs ed + mkEntityFieldConstr fieldHaskellName = + conE $ filterConName' mps entityHaskellName fieldHaskellName + :: Q Exp + regularFields <- forM (toList allFields) $ \fieldDef -> do + let + fieldHaskellName = + unboundFieldNameHS fieldDef + let fieldNameT :: Q Type fieldNameT = litT $ strTyLit $ T.unpack $ lowerFirstIfId - $ unFieldNameHS $ fieldHaskell fieldDef + $ unFieldNameHS fieldHaskellName lowerFirstIfId "Id" = "id" lowerFirstIfId xs = xs - nameG = mkEntityDefGenericName ed - - recordNameT - | mpsGeneric mps = - conT nameG `appT` varT backendName + fieldTypeT + | fieldHaskellName == FieldNameHS "Id" = + conT ''Key `appT` recordNameT | otherwise = - entityDefConT ed - - fieldTypeT = - maybeIdType mps fieldDef Nothing Nothing + pure $ maybeIdType mps entityMap fieldDef Nothing Nothing entityFieldConstr = - conE $ filterConName mps ed fieldDef - :: Q Exp + mkEntityFieldConstr fieldHaskellName + mkInstance fieldNameT fieldTypeT entityFieldConstr + + mkey <- + case unboundPrimarySpec ed of + NaturalKey _ -> + pure [] + _ -> do + let + fieldHaskellName = + FieldNameHS "Id" + entityFieldConstr = + mkEntityFieldConstr fieldHaskellName + fieldTypeT = + conT ''Key `appT` recordNameT + 
mkInstance [t|"id"|] fieldTypeT entityFieldConstr + + pure (mkey <> join regularFields) + where + nameG = + mkEntityDefGenericName ed + recordNameT + | mpsGeneric mps = + conT nameG `appT` varT backendName + | otherwise = + entityDefConT ed + mkInstance fieldNameT fieldTypeT entityFieldConstr = [d| - instance SymbolToField $(fieldNameT) $(recordNameT) $(pure fieldTypeT) where + instance SymbolToField $(fieldNameT) $(recordNameT) $(fieldTypeT) where symbolToField = $(entityFieldConstr) |] + -- | Pass in a list of lists of extensions, where any of the given -- extensions will satisfy it. For example, you might need either GADTs or -- ExistentialQuantification, so you'd write: @@ -1956,18 +2675,18 @@ requireExtensions requiredExtensions = do ] extensions -> fail $ mconcat [ "Generating Persistent entities now requires the following language extensions:\n\n" - , List.intercalate "\n" (map show extensions) + , List.intercalate "\n" (fmap show extensions) , "\n\nPlease enable the extensions by copy/pasting these lines into the top of your file:\n\n" - , List.intercalate "\n" (map extensionToPragma extensions) + , List.intercalate "\n" (fmap extensionToPragma extensions) ] where extensionToPragma ext = "{-# LANGUAGE " <> show ext <> " #-}" -- | creates a TH Name for use in the ToJSON instance -fieldToJSONValName :: FieldDef -> Q Name +fieldToJSONValName :: UnboundFieldDef -> Q Name fieldToJSONValName = - newName . T.unpack . unFieldNameHSForJSON . fieldHaskell + newName . T.unpack . unFieldNameHSForJSON . unboundFieldNameHS -- | This special-cases "type_" and strips out its underscore. When -- used for JSON serialization and deserialization, it works around @@ -1979,13 +2698,13 @@ unFieldNameHSForJSON = fixTypeUnderscore . unFieldNameHS "type" -> "type_" name -> name -entityDefConK :: EntityDef -> Kind +entityDefConK :: UnboundEntityDef -> Kind entityDefConK = conK . 
mkEntityDefName -entityDefConT :: EntityDef -> Q Type +entityDefConT :: UnboundEntityDef -> Q Type entityDefConT = pure . entityDefConK -entityDefConE :: EntityDef -> Exp +entityDefConE :: UnboundEntityDef -> Exp entityDefConE = ConE . mkEntityDefName -- | creates a TH Name for an entity's field, based on the entity @@ -1995,18 +2714,18 @@ entityDefConE = ConE . mkEntityDefName -- name Text -- -- This would generate `customerName` as a TH Name -fieldNameToRecordName :: MkPersistSettings -> EntityDef -> FieldNameHS -> Name +fieldNameToRecordName :: MkPersistSettings -> UnboundEntityDef -> FieldNameHS -> Name fieldNameToRecordName mps entDef fieldName = - mkRecordName mps mUnderscore (entityHaskell entDef) fieldName + mkRecordName mps mUnderscore (entityHaskell (unboundEntityDef entDef)) fieldName where mUnderscore | mpsGenerateLenses mps = Just "_" | otherwise = Nothing -- | as above, only takes a `FieldDef` -fieldDefToRecordName :: MkPersistSettings -> EntityDef -> FieldDef -> Name +fieldDefToRecordName :: MkPersistSettings -> UnboundEntityDef -> UnboundFieldDef -> Name fieldDefToRecordName mps entDef fieldDef = - fieldNameToRecordName mps entDef (fieldHaskell fieldDef) + fieldNameToRecordName mps entDef (unboundFieldNameHS fieldDef) -- | creates a TH Name for a lens on an entity's field, based on the entity -- name and the field name, so as above but for the Lens @@ -2017,9 +2736,9 @@ fieldDefToRecordName mps entDef fieldDef = -- Generates a lens `customerName` when `mpsGenerateLenses` is true -- while `fieldNameToRecordName` generates a prefixed function -- `_customerName` -mkEntityLensName :: MkPersistSettings -> EntityDef -> FieldDef -> Name +mkEntityLensName :: MkPersistSettings -> UnboundEntityDef -> UnboundFieldDef -> Name mkEntityLensName mps entDef fieldDef = - mkRecordName mps Nothing (entityHaskell entDef) (fieldHaskell fieldDef) + mkRecordName mps Nothing (entityHaskell (unboundEntityDef entDef)) (unboundFieldNameHS fieldDef) mkRecordName :: 
MkPersistSettings -> Maybe Text -> EntityNameHS -> FieldNameHS -> Name mkRecordName mps prefix entNameHS fieldNameHS = @@ -2039,11 +2758,15 @@ mkRecordName mps prefix entNameHS fieldNameHS = unFieldNameHS fieldNameHS -- | Construct a list of TH Names for the typeclasses of an EntityDef's `entityDerives` -mkEntityDefDeriveNames :: MkPersistSettings -> EntityDef -> [Name] +mkEntityDefDeriveNames :: MkPersistSettings -> UnboundEntityDef -> [Name] mkEntityDefDeriveNames mps entDef = - let entityInstances = mkName . T.unpack <$> entityDerives entDef - additionalInstances = filter (`notElem` entityInstances) $ mpsDeriveInstances mps - in entityInstances <> additionalInstances + let + entityInstances = + mkName . T.unpack <$> entityDerives (unboundEntityDef entDef) + additionalInstances = + filter (`notElem` entityInstances) $ mpsDeriveInstances mps + in + entityInstances <> additionalInstances -- | Make a TH Name for the EntityDef's Haskell type mkEntityNameHSName :: EntityNameHS -> Name @@ -2051,44 +2774,57 @@ mkEntityNameHSName = mkName . T.unpack . unEntityNameHS -- | As above only taking an `EntityDef` -mkEntityDefName :: EntityDef -> Name +mkEntityDefName :: UnboundEntityDef -> Name mkEntityDefName = - mkEntityNameHSName . entityHaskell + mkEntityNameHSName . entityHaskell . unboundEntityDef -- | Make a TH Name for the EntityDef's Haskell type, when using mpsGeneric -mkEntityDefGenericName :: EntityDef -> Name +mkEntityDefGenericName :: UnboundEntityDef -> Name mkEntityDefGenericName = - mkEntityNameHSGenericName . entityHaskell + mkEntityNameHSGenericName . entityHaskell . 
unboundEntityDef mkEntityNameHSGenericName :: EntityNameHS -> Name mkEntityNameHSGenericName name = mkName $ T.unpack (unEntityNameHS name <> "Generic") -sumConstrName :: MkPersistSettings -> EntityDef -> FieldDef -> Name -sumConstrName mps entDef FieldDef {..} = mkName $ T.unpack name - where - name - | mpsPrefixFields mps = modifiedName ++ "Sum" - | otherwise = fieldName ++ "Sum" - modifiedName = mpsConstraintLabelModifier mps entityName fieldName - entityName = unEntityNameHS $ entityHaskell entDef - fieldName = upperFirst $ unFieldNameHS fieldHaskell +-- needs: +-- +-- * entityHaskell +-- * field on EntityDef +-- * fieldHaskell +-- * field on FieldDef +-- +sumConstrName :: MkPersistSettings -> UnboundEntityDef -> UnboundFieldDef -> Name +sumConstrName mps entDef unboundFieldDef = + mkName $ T.unpack name + where + name + | mpsPrefixFields mps = modifiedName ++ "Sum" + | otherwise = fieldName ++ "Sum" + fieldNameHS = + unboundFieldNameHS unboundFieldDef + modifiedName = + mpsConstraintLabelModifier mps entityName fieldName + entityName = + unEntityNameHS $ getUnboundEntityNameHS entDef + fieldName = + upperFirst $ unFieldNameHS fieldNameHS -- | Turn a ConstraintName into a TH Name mkConstraintName :: ConstraintNameHS -> Name mkConstraintName (ConstraintNameHS name) = mkName (T.unpack name) -keyIdName :: EntityDef -> Name +keyIdName :: UnboundEntityDef -> Name keyIdName = mkName . T.unpack . keyIdText -keyIdText :: EntityDef -> Text -keyIdText entDef = unEntityNameHS (entityHaskell entDef) `mappend` "Id" +keyIdText :: UnboundEntityDef -> Text +keyIdText entDef = unEntityNameHS (getUnboundEntityNameHS entDef) `mappend` "Id" -unKeyName :: EntityDef -> Name +unKeyName :: UnboundEntityDef -> Name unKeyName entDef = mkName $ T.unpack $ "un" `mappend` keyText entDef -unKeyExp :: EntityDef -> Exp +unKeyExp :: UnboundEntityDef -> Exp unKeyExp = VarE . 
unKeyName backendT :: Type @@ -2097,29 +2833,51 @@ backendT = VarT backendName backendName :: Name backendName = mkName "backend" -keyConName :: EntityDef -> Name -keyConName entDef = mkName $ T.unpack $ resolveConflict $ keyText entDef +-- needs: +-- +-- * keyText +-- * entityNameHaskell +-- * fields +-- * fieldHaskell +-- +-- keyConName :: EntityNameHS -> [FieldHaskell] -> Name +keyConName :: UnboundEntityDef -> Name +keyConName entDef = + keyConName' + (getUnboundEntityNameHS entDef) + (unboundFieldNameHS <$> unboundEntityFields (entDef)) + + +keyConName' :: EntityNameHS -> [FieldNameHS] -> Name +keyConName' entName entFields = mkName $ T.unpack $ resolveConflict $ keyText' entName where resolveConflict kn = if conflict then kn `mappend` "'" else kn - conflict = any ((== FieldNameHS "key") . fieldHaskell) $ entityFields entDef + conflict = any (== FieldNameHS "key") entFields + +-- keyConExp :: EntityNameHS -> [FieldNameHS] -> Exp +keyConExp :: UnboundEntityDef -> Exp +keyConExp ed = ConE $ keyConName ed -keyConExp :: EntityDef -> Exp -keyConExp = ConE . 
keyConName +keyText :: UnboundEntityDef -> Text +keyText entDef = unEntityNameHS (getUnboundEntityNameHS entDef) ++ "Key" -keyText :: EntityDef -> Text -keyText entDef = unEntityNameHS (entityHaskell entDef) ++ "Key" +keyText' :: EntityNameHS -> Text +keyText' entName = unEntityNameHS entName ++ "Key" -keyFieldName :: MkPersistSettings -> EntityDef -> FieldDef -> Name +keyFieldName :: MkPersistSettings -> UnboundEntityDef -> FieldNameHS -> Name keyFieldName mps entDef fieldDef - | pkNewtype mps entDef = unKeyName entDef - | otherwise = mkName $ T.unpack $ lowerFirst (keyText entDef) `mappend` unFieldNameHS (fieldHaskell fieldDef) + | pkNewtype mps entDef = + unKeyName entDef + | otherwise = + mkName $ T.unpack $ lowerFirst (keyText entDef) `mappend` unFieldNameHS fieldDef filterConName :: MkPersistSettings - -> EntityDef - -> FieldDef + -> UnboundEntityDef + -> UnboundFieldDef -> Name -filterConName mps entity field = filterConName' mps (entityHaskell entity) (fieldHaskell field) +filterConName mps (unboundEntityDef -> entity) field = + filterConName' mps (entityHaskell entity) (unboundFieldNameHS field) filterConName' :: MkPersistSettings @@ -2136,3 +2894,94 @@ filterConName' mps entity field = mkName $ T.unpack name modifiedName = mpsConstraintLabelModifier mps entityName fieldName entityName = unEntityNameHS entity fieldName = upperFirst $ unFieldNameHS field + +-- | Splice in a list of all 'EntityDef' in scope. This is useful when running +-- 'mkPersist' to ensure that all entity definitions are available for setting +-- foreign keys, and for performing migrations with all entities available. +-- +-- 'mkPersist' has the type @MkPersistSettings -> [EntityDef] -> DecsQ@. So, to +-- account for entities defined elsewhere, you'll @mappend $(discoverEntities)@. +-- +-- For example, +-- +-- @ +-- share +-- [ mkPersistWith sqlSettings $(discoverEntities) +-- ] +-- [persistLowerCase| ... 
|] +-- @ +-- +-- Likewise, to run migrations with all entity instances in scope, you'd write: +-- +-- @ +-- migrateAll = migrateModels $(discoverEntities) +-- @ +-- +-- Note that there is some odd behavior with Template Haskell and splicing +-- groups. If you call 'discoverEntities' in the same module that defines +-- 'PersistEntity' instances, you need to ensure they are in different top-level +-- binding groups. You can write @$(pure [])@ at the top level to do this. +-- +-- @ +-- -- Foo and Bar both export an instance of PersistEntity +-- import Foo +-- import Bar +-- +-- -- Since Foo and Bar are both imported, discoverEntities can find them here. +-- mkPersistWith sqlSettings $(discoverEntities) [persistLowerCase| +-- User +-- name Text +-- age Int +-- |] +-- +-- -- onlyFooBar is defined in the same 'top level group' as the above generated +-- -- instance for User, so it isn't present in this list. +-- onlyFooBar :: [EntityDef] +-- onlyFooBar = $(discoverEntities) +-- +-- -- We can manually create a new binding group with this, which splices an +-- -- empty list of declarations in. +-- $(pure []) +-- +-- -- fooBarUser is able to see the 'User' instance. +-- fooBarUser :: [EntityDef] +-- fooBarUser = $(discoverEntities) +-- @ +-- +-- @since 2.13.0.0 +discoverEntities :: Q Exp +discoverEntities = do + instances <- reifyInstances ''PersistEntity [VarT (mkName "a")] + let + types = + mapMaybe getDecType instances + getDecType dec = + case dec of + InstanceD _moverlap _cxt typ _decs -> + stripPersistEntity typ + _ -> + Nothing + stripPersistEntity typ = + case typ of + AppT (ConT tyName) t | tyName == ''PersistEntity -> + Just t + _ -> + Nothing + + fmap ListE $ + forM types $ \typ -> do + [e| entityDef (Proxy :: Proxy $(pure typ)) |] + +setNull :: NonEmpty UnboundFieldDef -> Bool +setNull (fd :| fds) = + let + nullSetting = + isNull fd + isNull = + (NotNullable /=) . nullable . unboundFieldAttrs + in + if all ((nullSetting ==) . 
isNull) fds + then nullSetting + else error $ + "foreign key columns must all be nullable or non-nullable" + ++ show (fmap (unFieldNameHS . unboundFieldNameHS) (fd:fds)) diff --git a/persistent/Database/Persist/Types.hs b/persistent/Database/Persist/Types.hs index 4625c2dc1..1ef488c84 100644 --- a/persistent/Database/Persist/Types.hs +++ b/persistent/Database/Persist/Types.hs @@ -1,5 +1,9 @@ module Database.Persist.Types ( module Database.Persist.Types.Base + , module Database.Persist.Names + , module Database.Persist.EntityDef + , module Database.Persist.FieldDef + , module Database.Persist.PersistValue , SomePersistField (..) , Update (..) , BackendSpecificUpdate @@ -12,6 +16,43 @@ module Database.Persist.Types , OverflowNatural(..) ) where -import Database.Persist.Types.Base +import Database.Persist.Names import Database.Persist.Class.PersistField import Database.Persist.Class.PersistEntity +import Database.Persist.EntityDef +import Database.Persist.FieldDef +import Database.Persist.PersistValue + +-- this module is a bit of a kitchen sink of types and concepts. the guts of +-- persistent, just strewn across the table. in 2.13 let's get this cleaned up +-- and a bit more tidy. +import Database.Persist.Types.Base + ( FieldCascade(..) + , ForeignDef(..) + , CascadeAction(..) + , FieldDef(..) + , UniqueDef(..) + , FieldAttr(..) + , IsNullable(..) + , WhyNullable(..) + , ExtraLine + , Checkmark(..) + , FieldType(..) + , PersistException(..) + , ForeignFieldDef + , Attr + , CompositeDef(..) + , SqlType(..) + , ReferenceDef(..) + , noCascade + , parseFieldAttrs + , keyAndEntityFields + , PersistException(..) + , UpdateException(..) + , PersistValue(..) + , PersistFilter(..) + , PersistUpdate(..) + , EmbedEntityDef(..) + , EmbedFieldDef(..) + , LiteralType(..) 
+ ) diff --git a/persistent/Database/Persist/Types/Base.hs b/persistent/Database/Persist/Types/Base.hs index 14ed127a5..a10add26d 100644 --- a/persistent/Database/Persist/Types/Base.hs +++ b/persistent/Database/Persist/Types/Base.hs @@ -1,47 +1,44 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE LambdaCase, PatternSynonyms #-} +{-# LANGUAGE AllowAmbiguousTypes #-} {-# LANGUAGE DeriveLift #-} -{-# OPTIONS_GHC -fno-warn-deprecations #-} -- usage of Error typeclass +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE PatternSynonyms #-} +{-# LANGUAGE PolyKinds #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} + module Database.Persist.Types.Base ( module Database.Persist.Types.Base - , PersistValue(.., PersistLiteral, PersistLiteralEscaped, PersistDbSpecific) + -- * Re-exports + , PersistValue(..) + , fromPersistValueText , LiteralType(..) ) where -import Control.Arrow (second) import Control.Exception (Exception) -import Control.Monad.Trans.Error (Error (..)) -import qualified Data.Aeson as A -import Data.Bits (shiftL, shiftR) -import Data.ByteString (ByteString, foldl') -import qualified Data.ByteString as BS -import qualified Data.ByteString.Base64 as B64 -import qualified Data.ByteString.Char8 as BS8 import Data.Char (isSpace) -import qualified Data.HashMap.Strict as HM -import Data.Int (Int64) +import Data.List.NonEmpty (NonEmpty(..)) +import qualified Data.List.NonEmpty as NEL import Data.Map (Map) -import Data.Maybe ( isNothing ) -#if !MIN_VERSION_base(4,11,0) --- This can be removed when GHC < 8.2.2 isn't supported anymore -import Data.Semigroup ((<>)) -#endif -import qualified Data.Scientific -import Data.Text (Text, pack) +import Data.Maybe (isNothing) +import Data.Text (Text) import qualified Data.Text as T -import qualified Data.Text.Encoding as TE -import Data.Text.Encoding.Error (lenientDecode) -import Data.Time (Day, TimeOfDay, UTCTime) -import qualified Data.Vector as V import Data.Word (Word32) -import Numeric 
(showHex, readHex) -import Web.PathPieces (PathPiece(..)) -import Web.HttpApiData (ToHttpApiData (..), FromHttpApiData (..), parseUrlPieceMaybe, showTextData, readTextData, parseBoundedTextData) import Language.Haskell.TH.Syntax (Lift(..)) +import Web.HttpApiData + ( FromHttpApiData(..) + , ToHttpApiData(..) + , parseBoundedTextData + , showTextData + ) +import Web.PathPieces (PathPiece(..)) -- Bring `Lift (Map k v)` instance into scope, as well as `Lift Text` -- instance on pre-1.2.4 versions of `text` import Instances.TH.Lift () +import Database.Persist.Names +import Database.Persist.PersistValue + -- | A 'Checkmark' should be used as a field type whenever a -- uniqueness constraint should guarantee that a certain kind of -- record may appear at most once, but other kinds of records may @@ -106,10 +103,10 @@ instance PathPiece Checkmark where fromPathPiece "inactive" = Just Inactive fromPathPiece _ = Nothing -data IsNullable = Nullable !WhyNullable - | NotNullable - deriving (Eq, Show) - +data IsNullable + = Nullable !WhyNullable + | NotNullable + deriving (Eq, Show) -- | The reason why a field is 'nullable' is very important. A -- field that is nullable because of a @Maybe@ tag will have its @@ -120,29 +117,6 @@ data WhyNullable = ByMaybeAttr | ByNullableAttr deriving (Eq, Show) --- | Convenience operations for working with '-NameDB' types. --- --- @since 2.12.0.0 -class DatabaseName a where - escapeWith :: (Text -> str) -> (a -> str) - --- | An 'EntityNameDB' represents the datastore-side name that @persistent@ --- will use for an entity. --- --- @since 2.12.0.0 -newtype EntityNameDB = EntityNameDB { unEntityNameDB :: Text } - deriving (Show, Eq, Read, Ord, Lift) - -instance DatabaseName EntityNameDB where - escapeWith f (EntityNameDB n) = f n - --- | An 'EntityNameHS' represents the Haskell-side name that @persistent@ --- will use for an entity. 
--- --- @since 2.12.0.0 -newtype EntityNameHS = EntityNameHS { unEntityNameHS :: Text } - deriving (Show, Eq, Read, Ord, Lift) - -- | An 'EntityDef' represents the information that @persistent@ knows -- about an Entity. It uses this information to generate the Haskell -- datatype, the SQL migrations, and other relevant conversions. @@ -151,7 +125,7 @@ data EntityDef = EntityDef -- ^ The name of the entity as Haskell understands it. , entityDB :: !EntityNameDB -- ^ The name of the database table corresponding to the entity. - , entityId :: !FieldDef + , entityId :: !EntityIdDef -- ^ The entity's primary key or identifier. , entityAttrs :: ![Attr] -- ^ The @persistent@ entity syntax allows you to add arbitrary 'Attr's @@ -178,29 +152,64 @@ data EntityDef = EntityDef } deriving (Show, Eq, Read, Ord, Lift) -entitiesPrimary :: EntityDef -> Maybe [FieldDef] -entitiesPrimary t = case fieldReference primaryField of - CompositeRef c -> Just $ compositeFields c - ForeignRef _ _ -> Just [primaryField] - _ -> Nothing - where - primaryField = entityId t - -entityPrimary :: EntityDef -> Maybe CompositeDef -entityPrimary t = case fieldReference (entityId t) of - CompositeRef c -> Just c - _ -> Nothing +-- | The definition for the entity's primary key ID. +-- +-- @since 2.13.0.0 +data EntityIdDef + = EntityIdField !FieldDef + -- ^ The entity has a single key column, and it is a surrogate key - that + -- is, you can't go from @rec -> Key rec@. + -- + -- @since 2.13.0.0 + | EntityIdNaturalKey !CompositeDef + -- ^ The entity has a natural key. This means you can write @rec -> Key rec@ + -- because all the key fields are present on the datatype. + -- + -- A natural key can have one or more columns. + -- + -- @since 2.13.0.0 + deriving (Show, Eq, Read, Ord, Lift) -entityKeyFields :: EntityDef -> [FieldDef] -entityKeyFields ent = - maybe [entityId ent] compositeFields $ entityPrimary ent +-- | Return the @['FieldDef']@ for the entity keys. 
+entitiesPrimary :: EntityDef -> NonEmpty FieldDef +entitiesPrimary t = + case entityId t of + EntityIdNaturalKey fds -> + compositeFields fds + EntityIdField fd -> + pure fd -keyAndEntityFields :: EntityDef -> [FieldDef] +entityPrimary :: EntityDef -> Maybe CompositeDef +entityPrimary t = + case entityId t of + EntityIdNaturalKey c -> + Just c + _ -> + Nothing + +entityKeyFields :: EntityDef -> NonEmpty FieldDef +entityKeyFields = + entitiesPrimary + +-- | Returns a 'NonEmpty' list of 'FieldDef' that correspond with the key +-- columns for an 'EntityDef'. +keyAndEntityFields :: EntityDef -> NonEmpty FieldDef keyAndEntityFields ent = - case entityPrimary ent of - Nothing -> entityId ent : entityFields ent - Just _ -> entityFields ent - + case entityId ent of + EntityIdField fd -> + fd :| fields + EntityIdNaturalKey _ -> + case NEL.nonEmpty fields of + Nothing -> + error $ mconcat + [ "persistent internal guarantee failed: entity is " + , "defined with an entityId = EntityIdNaturalKey, " + , "but somehow doesn't have any entity fields." + ] + Just xs -> + xs + where + fields = filter isHaskellField $ entityFields ent type ExtraLine = [Text] @@ -215,16 +224,146 @@ type Attr = Text -- @since 2.11.0.0 data FieldAttr = FieldAttrMaybe + -- ^ The 'Maybe' keyword goes after the type. This indicates that the column + -- is nullable, and the generated Haskell code will have a @'Maybe'@ type + -- for it. + -- + -- Example: + -- + -- @ + -- User + -- name Text Maybe + -- @ | FieldAttrNullable + -- ^ This indicates that the column is nullable, but should not have + -- a 'Maybe' type. For this to work out, you need to ensure that the + -- 'PersistField' instance for the type in question can support + -- a 'PersistNull' value. 
+ -- + -- @ + -- data What = NoWhat | Hello Text + -- + -- instance PersistField What where + -- fromPersistValue PersistNull = + -- pure NoWhat + -- fromPersistValue pv = + -- Hello <$> fromPersistValue pv + -- + -- instance PersistFieldSql What where + -- sqlType _ = SqlString + -- + -- User + -- what What nullable + -- @ | FieldAttrMigrationOnly + -- ^ This tag means that the column will not be present on the Haskell code, + -- but will not be removed from the database. Useful to deprecate fields in + -- phases. + -- + -- You should set the column to be nullable in the database. Otherwise, + -- inserts won't have values. + -- + -- @ + -- User + -- oldName Text MigrationOnly + -- newName Text + -- @ | FieldAttrSafeToRemove + -- ^ A @SafeToRemove@ attribute is not present on the Haskell datatype, and + -- the backend migrations should attempt to drop the column without + -- triggering any unsafe migration warnings. + -- + -- Useful after you've used @MigrationOnly@ to remove a column from the + -- database in phases. + -- + -- @ + -- User + -- oldName Text SafeToRemove + -- newName Text + -- @ | FieldAttrNoreference + -- ^ This attribute indicates that we should create a foreign key reference + -- from a column. By default, @persistent@ will try and create a foreign key + -- reference for a column if it can determine that the type of the column is + -- a @'Key' entity@ or an @EntityId@ and the @Entity@'s name was present in + -- 'mkPersist'. + -- + -- This is useful if you want to use the explicit foreign key syntax. + -- + -- @ + -- Post + -- title Text + -- + -- Comment + -- postId PostId noreference + -- Foreign Post fk_comment_post postId + -- @ | FieldAttrReference Text + -- ^ This is set to specify precisely the database table the column refers + -- to. 
+ -- + -- @ + -- Post + -- title Text + -- + -- Comment + -- postId PostId references="post" + -- @ + -- + -- You should not need this - @persistent@ should be capable of correctly + -- determining the target table's name. If you do need this, please file an + -- issue describing why. | FieldAttrConstraint Text + -- ^ Specify a name for the constraint on the foreign key reference for this + -- table. + -- + -- @ + -- Post + -- title Text + -- + -- Comment + -- postId PostId constraint="my_cool_constraint_name" + -- @ | FieldAttrDefault Text + -- ^ Specify the default value for a column. + -- + -- @ + -- User + -- createdAt UTCTime default="NOW()" + -- @ + -- + -- Note that a @default=@ attribute does not mean you can omit the value + -- while inserting. | FieldAttrSqltype Text + -- ^ Specify a custom SQL type for the column. Generally, you should define + -- a custom datatype with a custom 'PersistFieldSql' instance instead of + -- using this. + -- + -- @ + -- User + -- uuid Text sqltype="UUID" + -- @ | FieldAttrMaxlen Integer + -- ^ Set a maximum length for a column. Useful for VARCHAR and indexes. + -- + -- @ + -- User + -- name Text maxlen=200 + -- + -- UniqueName name + -- @ + | FieldAttrSql Text + -- ^ Specify the database name of the column. + -- + -- @ + -- User + -- blarghle Int sql="b_l_a_r_g_h_l_e" + -- @ + -- + -- Useful for performing phased migrations, where one column is renamed to + -- another column over time. | FieldAttrOther Text + -- ^ A grab bag of random attributes that were unrecognized by the parser. deriving (Show, Eq, Read, Ord, Lift) -- | Parse raw field attributes into structured form. 
Any unrecognized @@ -247,6 +386,8 @@ parseFieldAttrs = fmap $ \case | Just x <- T.stripPrefix "maxlen=" raw -> case reads (T.unpack x) of [(n, s)] | all isSpace s -> FieldAttrMaxlen n _ -> error $ "Could not parse maxlen field with value " <> show raw + | Just x <- T.stripPrefix "sql=" raw -> + FieldAttrSql x | otherwise -> FieldAttrOther raw -- | A 'FieldType' describes a field parsed from the QuasiQuoter and is @@ -269,68 +410,6 @@ data FieldType | FTList FieldType deriving (Show, Eq, Read, Ord, Lift) --- | An 'EntityNameDB' represents the datastore-side name that @persistent@ --- will use for an entity. --- --- @since 2.12.0.0 -newtype FieldNameDB = FieldNameDB { unFieldNameDB :: Text } - deriving (Show, Eq, Read, Ord, Lift) - --- | @since 2.12.0.0 -instance DatabaseName FieldNameDB where - escapeWith f (FieldNameDB n) = f n - --- | A 'FieldNameHS' represents the Haskell-side name that @persistent@ --- will use for a field. --- --- @since 2.12.0.0 -newtype FieldNameHS = FieldNameHS { unFieldNameHS :: Text } - deriving (Show, Eq, Read, Ord, Lift) - --- | A 'FieldDef' represents the inormation that @persistent@ knows about --- a field of a datatype. This includes information used to parse the field --- out of the database and what the field corresponds to. -data FieldDef = FieldDef - { fieldHaskell :: !FieldNameHS - -- ^ The name of the field. Note that this does not corresponds to the - -- record labels generated for the particular entity - record labels - -- are generated with the type name prefixed to the field, so - -- a 'FieldDef' that contains a @'FieldNameHS' "name"@ for a type - -- @User@ will have a record field @userName@. - , fieldDB :: !FieldNameDB - -- ^ The name of the field in the database. For SQL databases, this - -- corresponds to the column name. - , fieldType :: !FieldType - -- ^ The type of the field in Haskell. - , fieldSqlType :: !SqlType - -- ^ The type of the field in a SQL database. 
- , fieldAttrs :: ![FieldAttr] - -- ^ User annotations for a field. These are provided with the @!@ - -- operator. - , fieldStrict :: !Bool - -- ^ If this is 'True', then the Haskell datatype will have a strict - -- record field. The default value for this is 'True'. - , fieldReference :: !ReferenceDef - , fieldCascade :: !FieldCascade - -- ^ Defines how operations on the field cascade on to the referenced - -- tables. This doesn't have any meaning if the 'fieldReference' is set - -- to 'NoReference' or 'SelfReference'. The cascade option here should - -- be the same as the one obtained in the 'fieldReference'. - -- - -- @since 2.11.0 - , fieldComments :: !(Maybe Text) - -- ^ Optional comments for a 'Field'. There is not currently a way to - -- attach comments to a field in the quasiquoter. - -- - -- @since 2.10.0 - , fieldGenerated :: !(Maybe Text) - -- ^ Whether or not the field is a @GENERATED@ column, and additionally - -- the expression to use for generation. - -- - -- @since 2.11.0.0 - } - deriving (Show, Eq, Read, Ord, Lift) - isFieldNotGenerated :: FieldDef -> Bool isFieldNotGenerated = isNothing . fieldGenerated @@ -338,95 +417,100 @@ isFieldNotGenerated = isNothing . fieldGenerated -- 1) composite (to fields that exist in the record) -- 2) single field -- 3) embedded -data ReferenceDef = NoReference - | ForeignRef !EntityNameHS !FieldType - -- ^ A ForeignRef has a late binding to the EntityDef it references via name and has the Haskell type of the foreign key in the form of FieldType - | EmbedRef EmbedEntityDef - | CompositeRef CompositeDef - | SelfReference - -- ^ A SelfReference stops an immediate cycle which causes non-termination at compile-time (issue #311). 
- deriving (Show, Eq, Read, Ord, Lift) +data ReferenceDef + = NoReference + | ForeignRef !EntityNameHS + -- ^ A ForeignRef has a late binding to the EntityDef it references via name + -- and has the Haskell type of the foreign key in the form of FieldType + | EmbedRef EntityNameHS + | CompositeRef CompositeDef + | SelfReference + -- ^ A SelfReference stops an immediate cycle which causes non-termination at compile-time (issue #311). + deriving (Show, Eq, Read, Ord, Lift) -- | An EmbedEntityDef is the same as an EntityDef -- But it is only used for fieldReference -- so it only has data needed for embedding data EmbedEntityDef = EmbedEntityDef - { embeddedHaskell :: !EntityNameHS - , embeddedFields :: ![EmbedFieldDef] + { embeddedHaskell :: EntityNameHS + , embeddedFields :: [EmbedFieldDef] } deriving (Show, Eq, Read, Ord, Lift) -- | An EmbedFieldDef is the same as a FieldDef -- But it is only used for embeddedFields -- so it only has data needed for embedding data EmbedFieldDef = EmbedFieldDef - { emFieldDB :: !FieldNameDB - , emFieldEmbed :: Maybe EmbedEntityDef - , emFieldCycle :: Maybe EntityNameHS - -- ^ 'emFieldEmbed' can create a cycle (issue #311) - -- when a cycle is detected, 'emFieldEmbed' will be Nothing - -- and 'emFieldCycle' will be Just + { emFieldDB :: FieldNameDB + , emFieldEmbed :: Maybe (Either SelfEmbed EntityNameHS) } deriving (Show, Eq, Read, Ord, Lift) +data SelfEmbed = SelfEmbed + deriving (Show, Eq, Read, Ord, Lift) + +-- | Returns 'True' if the 'FieldDef' does not have a 'MigrationOnly' or +-- 'SafeToRemove' flag from the QuasiQuoter. 
+-- +-- @since 2.13.0.0 +isHaskellField :: FieldDef -> Bool +isHaskellField fd = + FieldAttrMigrationOnly `notElem` fieldAttrs fd && + FieldAttrSafeToRemove `notElem` fieldAttrs fd + toEmbedEntityDef :: EntityDef -> EmbedEntityDef toEmbedEntityDef ent = embDef where embDef = EmbedEntityDef - { embeddedHaskell = entityHaskell ent - , embeddedFields = map toEmbedFieldDef $ entityFields ent - } + { embeddedHaskell = entityHaskell ent + , embeddedFields = + map toEmbedFieldDef + $ filter isHaskellField + $ entityFields ent + } toEmbedFieldDef :: FieldDef -> EmbedFieldDef toEmbedFieldDef field = - EmbedFieldDef { emFieldDB = fieldDB field - , emFieldEmbed = case fieldReference field of - EmbedRef em -> Just em - SelfReference -> Just embDef - _ -> Nothing - , emFieldCycle = case fieldReference field of - SelfReference -> Just $ entityHaskell ent - _ -> Nothing - } - --- | A 'ConstraintNameDB' represents the datastore-side name that @persistent@ --- will use for a constraint. --- --- @since 2.12.0.0 -newtype ConstraintNameDB = ConstraintNameDB { unConstraintNameDB :: Text } - deriving (Show, Eq, Read, Ord, Lift) - --- | @since 2.12.0.0 -instance DatabaseName ConstraintNameDB where - escapeWith f (ConstraintNameDB n) = f n - --- | An 'ConstraintNameHS' represents the Haskell-side name that @persistent@ --- will use for a constraint. + EmbedFieldDef + { emFieldDB = + fieldDB field + , emFieldEmbed = + case fieldReference field of + EmbedRef em -> + Just $ Right em + SelfReference -> Just $ Left SelfEmbed + _ -> Nothing + } + +-- | Type for storing the Uniqueness constraint in the Schema. Assume you have +-- the following schema with a uniqueness constraint: -- --- @since 2.12.0.0 -newtype ConstraintNameHS = ConstraintNameHS { unConstraintNameHS :: Text } - deriving (Show, Eq, Read, Ord, Lift) - --- Type for storing the Uniqueness constraint in the Schema. 
--- Assume you have the following schema with a uniqueness --- constraint: +-- @ -- Person -- name String -- age Int -- UniqueAge age +-- @ -- -- This will be represented as: --- UniqueDef (ConstraintNameHS (packPTH "UniqueAge")) --- (ConstraintNameDB (packPTH "unique_age")) [(FieldNameHS (packPTH "age"), FieldNameDB (packPTH "age"))] [] +-- +-- @ +-- UniqueDef +-- { uniqueHaskell = ConstraintNameHS (packPTH "UniqueAge") +-- , uniqueDBName = ConstraintNameDB (packPTH "unique_age") +-- , uniqueFields = [(FieldNameHS (packPTH "age"), FieldNameDB (packPTH "age"))] +-- , uniqueAttrs = [] +-- } +-- @ -- data UniqueDef = UniqueDef { uniqueHaskell :: !ConstraintNameHS , uniqueDBName :: !ConstraintNameDB - , uniqueFields :: ![(FieldNameHS, FieldNameDB)] + , uniqueFields :: !(NonEmpty (FieldNameHS, FieldNameDB)) , uniqueAttrs :: ![Attr] } deriving (Show, Eq, Read, Ord, Lift) data CompositeDef = CompositeDef - { compositeFields :: ![FieldDef] + { compositeFields :: !(NonEmpty FieldDef) , compositeAttrs :: ![Attr] } deriving (Show, Eq, Read, Ord, Lift) @@ -514,220 +598,6 @@ data PersistException deriving Show instance Exception PersistException -instance Error PersistException where - strMsg = PersistError . pack - --- | A raw value which can be stored in any backend and can be marshalled to --- and from a 'PersistField'. -data PersistValue - = PersistText Text - | PersistByteString ByteString - | PersistInt64 Int64 - | PersistDouble Double - | PersistRational Rational - | PersistBool Bool - | PersistDay Day - | PersistTimeOfDay TimeOfDay - | PersistUTCTime UTCTime - | PersistNull - | PersistList [PersistValue] - | PersistMap [(Text, PersistValue)] - | PersistObjectId ByteString -- ^ Intended especially for MongoDB backend - | PersistArray [PersistValue] -- ^ Intended especially for PostgreSQL backend for text arrays - | PersistLiteral_ LiteralType ByteString - -- ^ This constructor is used to specify some raw literal value for the - -- backend. 
The 'LiteralType' value specifies how the value should be - -- escaped. This can be used to make special, custom types avaialable - -- in the back end. - -- - -- @since 2.12.0.0 - deriving (Show, Read, Eq, Ord) - --- | A type that determines how a backend should handle the literal. --- --- @since 2.12.0.0 -data LiteralType - = Escaped - -- ^ The accompanying value will be escaped before inserting into the - -- database. This is the correct default choice to use. - -- - -- @since 2.12.0.0 - | Unescaped - -- ^ The accompanying value will not be escaped when inserting into the - -- database. This is potentially dangerous - use this with care. - -- - -- @since 2.12.0.0 - | DbSpecific - -- ^ The 'DbSpecific' constructor corresponds to the legacy - -- 'PersistDbSpecific' constructor. We need to keep this around because - -- old databases may have serialized JSON representations that - -- reference this. We don't want to break the ability of a database to - -- load rows. - -- - -- @since 2.12.0.0 - deriving (Show, Read, Eq, Ord) - --- | This pattern synonym used to be a data constructor for the --- 'PersistValue' type. It was changed to be a pattern so that JSON-encoded --- database values could be parsed into their corresponding values. You --- should not use this, and instead prefer to pattern match on --- `PersistLiteral_` directly. --- --- If you use this, it will overlap a patern match on the 'PersistLiteral_, --- 'PersistLiteral', and 'PersistLiteralEscaped' patterns. If you need to --- disambiguate between these constructors, pattern match on --- 'PersistLiteral_' directly. --- --- @since 2.12.0.0 -pattern PersistDbSpecific bs <- PersistLiteral_ _ bs where - PersistDbSpecific bs = PersistLiteral_ DbSpecific bs - --- | This pattern synonym used to be a data constructor on 'PersistValue', --- but was changed into a catch-all pattern synonym to allow backwards --- compatiblity with database types. See the documentation on --- 'PersistDbSpecific' for more details. 
--- --- @since 2.12.0.0 -pattern PersistLiteralEscaped bs <- PersistLiteral_ _ bs where - PersistLiteralEscaped bs = PersistLiteral_ Escaped bs - --- | This pattern synonym used to be a data constructor on 'PersistValue', --- but was changed into a catch-all pattern synonym to allow backwards --- compatiblity with database types. See the documentation on --- 'PersistDbSpecific' for more details. --- --- @since 2.12.0.0 -pattern PersistLiteral bs <- PersistLiteral_ _ bs where - PersistLiteral bs = PersistLiteral_ Unescaped bs - -{-# DEPRECATED PersistDbSpecific "Deprecated since 2.11 because of inconsistent escaping behavior across backends. The Postgres backend escapes these values, while the MySQL backend does not. If you are using this, please switch to 'PersistLiteral_' and provide a relevant 'LiteralType' for your conversion." #-} - -instance ToHttpApiData PersistValue where - toUrlPiece val = - case fromPersistValueText val of - Left e -> error $ T.unpack e - Right y -> y - -instance FromHttpApiData PersistValue where - parseUrlPiece input = - PersistInt64 <$> parseUrlPiece input - PersistList <$> readTextData input - PersistText <$> return input - where - infixl 3 - Left _ y = y - x _ = x - -instance PathPiece PersistValue where - toPathPiece = toUrlPiece - fromPathPiece = parseUrlPieceMaybe - -fromPersistValueText :: PersistValue -> Either Text Text -fromPersistValueText (PersistText s) = Right s -fromPersistValueText (PersistByteString bs) = - Right $ TE.decodeUtf8With lenientDecode bs -fromPersistValueText (PersistInt64 i) = Right $ T.pack $ show i -fromPersistValueText (PersistDouble d) = Right $ T.pack $ show d -fromPersistValueText (PersistRational r) = Right $ T.pack $ show r -fromPersistValueText (PersistDay d) = Right $ T.pack $ show d -fromPersistValueText (PersistTimeOfDay d) = Right $ T.pack $ show d -fromPersistValueText (PersistUTCTime d) = Right $ T.pack $ show d -fromPersistValueText PersistNull = Left "Unexpected null" -fromPersistValueText 
(PersistBool b) = Right $ T.pack $ show b -fromPersistValueText (PersistList _) = Left "Cannot convert PersistList to Text" -fromPersistValueText (PersistMap _) = Left "Cannot convert PersistMap to Text" -fromPersistValueText (PersistObjectId _) = Left "Cannot convert PersistObjectId to Text" -fromPersistValueText (PersistArray _) = Left "Cannot convert PersistArray to Text" -fromPersistValueText (PersistLiteral_ _ _) = Left "Cannot convert PersistLiteral to Text" - -instance A.ToJSON PersistValue where - toJSON (PersistText t) = A.String $ T.cons 's' t - toJSON (PersistByteString b) = A.String $ T.cons 'b' $ TE.decodeUtf8 $ B64.encode b - toJSON (PersistInt64 i) = A.Number $ fromIntegral i - toJSON (PersistDouble d) = A.Number $ Data.Scientific.fromFloatDigits d - toJSON (PersistRational r) = A.String $ T.pack $ 'r' : show r - toJSON (PersistBool b) = A.Bool b - toJSON (PersistTimeOfDay t) = A.String $ T.pack $ 't' : show t - toJSON (PersistUTCTime u) = A.String $ T.pack $ 'u' : show u - toJSON (PersistDay d) = A.String $ T.pack $ 'd' : show d - toJSON PersistNull = A.Null - toJSON (PersistList l) = A.Array $ V.fromList $ map A.toJSON l - toJSON (PersistMap m) = A.object $ map (second A.toJSON) m - toJSON (PersistLiteral_ litTy b) = - let encoded = TE.decodeUtf8 $ B64.encode b - prefix = - case litTy of - DbSpecific -> 'p' - Unescaped -> 'l' - Escaped -> 'e' - in - A.String $ T.cons prefix encoded - toJSON (PersistArray a) = A.Array $ V.fromList $ map A.toJSON a - toJSON (PersistObjectId o) = - A.toJSON $ showChar 'o' $ showHexLen 8 (bs2i four) $ showHexLen 16 (bs2i eight) "" - where - (four, eight) = BS8.splitAt 4 o - - -- taken from crypto-api - bs2i :: ByteString -> Integer - bs2i bs = foldl' (\i b -> (i `shiftL` 8) + fromIntegral b) 0 bs - {-# INLINE bs2i #-} - - -- showHex of n padded with leading zeros if necessary to fill d digits - -- taken from Data.BSON - showHexLen :: (Show n, Integral n) => Int -> n -> ShowS - showHexLen d n = showString (replicate (d 
- sigDigits n) '0') . showHex n where - sigDigits 0 = 1 - sigDigits n' = truncate (logBase (16 :: Double) $ fromIntegral n') + 1 - -instance A.FromJSON PersistValue where - parseJSON (A.String t0) = - case T.uncons t0 of - Nothing -> fail "Null string" - Just ('p', t) -> either (\_ -> fail "Invalid base64") (return . PersistDbSpecific) - $ B64.decode $ TE.encodeUtf8 t - Just ('l', t) -> either (\_ -> fail "Invalid base64") (return . PersistLiteral) - $ B64.decode $ TE.encodeUtf8 t - Just ('e', t) -> either (\_ -> fail "Invalid base64") (return . PersistLiteralEscaped) - $ B64.decode $ TE.encodeUtf8 t - Just ('s', t) -> return $ PersistText t - Just ('b', t) -> either (\_ -> fail "Invalid base64") (return . PersistByteString) - $ B64.decode $ TE.encodeUtf8 t - Just ('t', t) -> PersistTimeOfDay <$> readMay t - Just ('u', t) -> PersistUTCTime <$> readMay t - Just ('d', t) -> PersistDay <$> readMay t - Just ('r', t) -> PersistRational <$> readMay t - Just ('o', t) -> maybe - (fail "Invalid base64") - (return . PersistObjectId . i2bs (8 * 12) . fst) - $ headMay $ readHex $ T.unpack t - Just (c, _) -> fail $ "Unknown prefix: " ++ [c] - where - headMay [] = Nothing - headMay (x:_) = Just x - readMay t = - case reads $ T.unpack t of - (x, _):_ -> return x - [] -> fail "Could not read" - - -- taken from crypto-api - -- |@i2bs bitLen i@ converts @i@ to a 'ByteString' of @bitLen@ bits (must be a multiple of 8). 
- i2bs :: Int -> Integer -> BS.ByteString - i2bs l i = BS.unfoldr (\l' -> if l' < 0 then Nothing else Just (fromIntegral (i `shiftR` l'), l' - 8)) (l-8) - {-# INLINE i2bs #-} - - - parseJSON (A.Number n) = return $ - if fromInteger (floor n) == n - then PersistInt64 $ floor n - else PersistDouble $ fromRational $ toRational n - parseJSON (A.Bool b) = return $ PersistBool b - parseJSON A.Null = return PersistNull - parseJSON (A.Array a) = fmap PersistList (mapM A.parseJSON $ V.toList a) - parseJSON (A.Object o) = - fmap PersistMap $ mapM go $ HM.toList o - where - go (k, v) = (,) k <$> A.parseJSON v -- | A SQL data type. Naming attempts to reflect the underlying Haskell -- datatypes, eg SqlString instead of SqlVarchar. Different SQL databases may @@ -763,6 +633,55 @@ instance Show OnlyUniqueException where instance Exception OnlyUniqueException -data PersistUpdate = Assign | Add | Subtract | Multiply | Divide - | BackendSpecificUpdate T.Text +data PersistUpdate + = Assign | Add | Subtract | Multiply | Divide + | BackendSpecificUpdate T.Text deriving (Read, Show, Lift) + +-- | A 'FieldDef' represents the inormation that @persistent@ knows about +-- a field of a datatype. This includes information used to parse the field +-- out of the database and what the field corresponds to. +data FieldDef = FieldDef + { fieldHaskell :: !FieldNameHS + -- ^ The name of the field. Note that this does not corresponds to the + -- record labels generated for the particular entity - record labels + -- are generated with the type name prefixed to the field, so + -- a 'FieldDef' that contains a @'FieldNameHS' "name"@ for a type + -- @User@ will have a record field @userName@. + , fieldDB :: !FieldNameDB + -- ^ The name of the field in the database. For SQL databases, this + -- corresponds to the column name. + , fieldType :: !FieldType + -- ^ The type of the field in Haskell. + , fieldSqlType :: !SqlType + -- ^ The type of the field in a SQL database. 
+ , fieldAttrs :: ![FieldAttr] + -- ^ User annotations for a field. These are provided with the @!@ + -- operator. + , fieldStrict :: !Bool + -- ^ If this is 'True', then the Haskell datatype will have a strict + -- record field. The default value for this is 'True'. + , fieldReference :: !ReferenceDef + , fieldCascade :: !FieldCascade + -- ^ Defines how operations on the field cascade on to the referenced + -- tables. This doesn't have any meaning if the 'fieldReference' is set + -- to 'NoReference' or 'SelfReference'. The cascade option here should + -- be the same as the one obtained in the 'fieldReference'. + -- + -- @since 2.11.0 + , fieldComments :: !(Maybe Text) + -- ^ Optional comments for a 'Field'. There is not currently a way to + -- attach comments to a field in the quasiquoter. + -- + -- @since 2.10.0 + , fieldGenerated :: !(Maybe Text) + -- ^ Whether or not the field is a @GENERATED@ column, and additionally + -- the expression to use for generation. + -- + -- @since 2.11.0.0 + , fieldIsImplicitIdColumn :: !Bool + -- ^ 'True' if the field is an implicit ID column. 'False' otherwise. 
+ -- + -- @since 2.13.0.0 + } + deriving (Show, Eq, Read, Ord, Lift) diff --git a/persistent/persistent.cabal b/persistent/persistent.cabal index aa9f9239c..73f73c85f 100644 --- a/persistent/persistent.cabal +++ b/persistent/persistent.cabal @@ -1,5 +1,5 @@ name: persistent -version: 2.12.1.2 +version: 2.13.0.0 license: MIT license-file: LICENSE author: Michael Snoyman @@ -15,79 +15,104 @@ bug-reports: https://github.com/yesodweb/persistent/issues extra-source-files: ChangeLog.md README.md library - build-depends: base >= 4.9 && < 5 - , aeson >= 1.0 - , attoparsec - , base64-bytestring - , blaze-html >= 0.9 - , bytestring >= 0.10 - , conduit >= 1.2.12 - , containers >= 0.5 - , fast-logger >= 2.4 - , http-api-data >= 0.3 - , monad-logger >= 0.3.28 - , mtl - , path-pieces >= 0.2 - , resource-pool >= 0.2.3 - , resourcet >= 1.1.10 - , scientific - , silently - , template-haskell >= 2.11 && < 2.17 - , text >= 1.2 - , time >= 1.6 - , transformers >= 0.5 - , unliftio-core - , unliftio - , unordered-containers - , th-lift-instances >= 0.1.14 && < 0.2 - , vector + build-depends: + base >= 4.11.1.0 && < 5 + , aeson >= 1.0 + , attoparsec + , base64-bytestring + , blaze-html >= 0.9 + , bytestring >= 0.10 + , conduit >= 1.2.12 + , containers >= 0.5 + , fast-logger >= 2.4 + , http-api-data >= 0.3 + , lift-type >= 0.1.0.0 && < 0.2.0.0 + , monad-logger >= 0.3.28 + , mtl + , path-pieces >= 0.2 + , resource-pool >= 0.2.3 + , resourcet >= 1.1.10 + , scientific + , silently + , template-haskell >= 2.13 && < 2.18 + , text >= 1.2 + , th-lift-instances >= 0.1.14 && < 0.2 + , time >= 1.6 + , transformers >= 0.5 + , unliftio + , unliftio-core + , unordered-containers + , vector - default-extensions: FlexibleContexts - , MultiParamTypeClasses - , OverloadedStrings - , TypeFamilies + default-extensions: + FlexibleContexts + , MultiParamTypeClasses + , OverloadedStrings + , TypeFamilies + + exposed-modules: + Database.Persist + Database.Persist.Types + Database.Persist.Names + 
Database.Persist.PersistValue + Database.Persist.EntityDef + Database.Persist.EntityDef.Internal + Database.Persist.FieldDef + Database.Persist.FieldDef.Internal + Database.Persist.ImplicitIdDef + Database.Persist.ImplicitIdDef.Internal + Database.Persist.TH + + Database.Persist.Quasi + Database.Persist.Quasi.Internal + + Database.Persist.Sql + Database.Persist.Sql.Util + Database.Persist.Sql.Types.Internal - exposed-modules: Database.Persist - Database.Persist.Quasi - Database.Persist.TH - - Database.Persist.Types - Database.Persist.Class - Database.Persist.Sql - Database.Persist.Sql.Util - Database.Persist.Sql.Types.Internal - - other-modules: Database.Persist.Types.Base - Database.Persist.Class.DeleteCascade - Database.Persist.Class.PersistEntity - Database.Persist.Class.PersistQuery - Database.Persist.Class.PersistUnique - Database.Persist.Class.PersistConfig - Database.Persist.Class.PersistField - Database.Persist.Class.PersistStore - - Database.Persist.Sql.Migration - Database.Persist.Sql.Internal - Database.Persist.Sql.Types - Database.Persist.Sql.Raw - Database.Persist.Sql.Run - Database.Persist.Sql.Class - Database.Persist.Sql.Orphan.PersistQuery - Database.Persist.Sql.Orphan.PersistStore - Database.Persist.Sql.Orphan.PersistUnique + Database.Persist.SqlBackend + Database.Persist.SqlBackend.Internal + Database.Persist.SqlBackend.Internal.InsertSqlResult + Database.Persist.SqlBackend.Internal.IsolationLevel + Database.Persist.SqlBackend.Internal.Statement + Database.Persist.SqlBackend.Internal.MkSqlBackend + + Database.Persist.Class + Database.Persist.Class.DeleteCascade + Database.Persist.Class.PersistEntity + Database.Persist.Class.PersistQuery + Database.Persist.Class.PersistUnique + Database.Persist.Class.PersistConfig + Database.Persist.Class.PersistField + Database.Persist.Class.PersistStore + + other-modules: + Database.Persist.Types.Base + + Database.Persist.Sql.Migration + Database.Persist.Sql.Internal + Database.Persist.Sql.Types + 
Database.Persist.Sql.Raw + Database.Persist.Sql.Run + Database.Persist.Sql.Class + Database.Persist.Sql.Orphan.PersistQuery + Database.Persist.Sql.Orphan.PersistStore + Database.Persist.Sql.Orphan.PersistUnique -- These modules only make sense for compilers with access to DerivingVia if impl(ghc >= 8.6.1) - exposed-modules: Database.Persist.Compatible - other-modules: Database.Persist.Compatible.Types - Database.Persist.Compatible.TH + exposed-modules: + Database.Persist.Compatible + other-modules: + Database.Persist.Compatible.Types + Database.Persist.Compatible.TH ghc-options: -Wall default-language: Haskell2010 test-suite test type: exitcode-stdio-1.0 - main-is: test/main.hs + main-is: main.hs build-depends: base >= 4.9 && < 5 @@ -96,37 +121,36 @@ test-suite test , base64-bytestring , blaze-html , bytestring + , conduit , containers + , fast-logger , hspec >= 2.4 , http-api-data + , monad-logger + , mtl , path-pieces + , persistent + , QuickCheck + , quickcheck-instances >= 0.3 + , resource-pool + , resourcet , scientific , shakespeare + , silently + , template-haskell >= 2.4 , text + , th-lift-instances , time , transformers + , unliftio + , unliftio-core , unordered-containers , vector - , QuickCheck - -- needed because of the `source-dirs: .` - -- TODO: factor the internal modules out so we can use them in tests - -- maybe in another package - , template-haskell >= 2.4 - , unliftio-core - , mtl - , resourcet - , conduit - , monad-logger - , fast-logger - , resource-pool - , unliftio - , silently - , th-lift-instances hs-source-dirs: - ./ test/ - cpp-options: -DTEST + + ghc-options: -Wall default-extensions: FlexibleContexts , MultiParamTypeClasses @@ -134,16 +158,27 @@ test-suite test , TypeFamilies other-modules: - Database.Persist.Class.PersistEntity - Database.Persist.Class.PersistField - Database.Persist.Quasi - Database.Persist.Types - Database.Persist.Types.Base + Database.Persist.TH.EmbedSpec + Database.Persist.TH.ImplicitIdColSpec + 
Database.Persist.TH.MigrationOnlySpec + Database.Persist.TH.OverloadedLabelSpec + Database.Persist.TH.SharedPrimaryKeyImportedSpec + Database.Persist.TH.SharedPrimaryKeySpec + Database.Persist.TH.MultiBlockSpec + Database.Persist.TH.ForeignRefSpec + Database.Persist.TH.JsonEncodingSpec + Database.Persist.TH.MultiBlockSpec.Model + Database.Persist.TH.ToFromPersistValuesSpec Database.Persist.THSpec + Database.Persist.QuasiSpec + Database.Persist.ClassSpec + Database.Persist.PersistValueSpec TemplateTestImports Database.Persist.TH.SharedPrimaryKeySpec Database.Persist.TH.SharedPrimaryKeyImportedSpec Database.Persist.TH.OverloadedLabelSpec + Database.Persist.TH.ImplicitIdColSpec + Database.Persist.TH.DiscoverEntitiesSpec default-language: Haskell2010 source-repository head diff --git a/persistent/test/Database/Persist/ClassSpec.hs b/persistent/test/Database/Persist/ClassSpec.hs new file mode 100644 index 000000000..429b15b70 --- /dev/null +++ b/persistent/test/Database/Persist/ClassSpec.hs @@ -0,0 +1,16 @@ +module Database.Persist.ClassSpec where + +import Database.Persist.Class +import Data.Time +import Database.Persist.Types +import Test.Hspec + +spec :: Spec +spec = describe "Class" $ do + describe "PersistField" $ do + describe "UTCTime" $ do + it "fromPersistValue with format" $ + fromPersistValue (PersistText "2018-02-27 10:49:42.123") + `shouldBe` + Right (UTCTime (fromGregorian 2018 02 27) (timeOfDayToTime (TimeOfDay 10 49 42.123))) + diff --git a/persistent/test/Database/Persist/PersistValueSpec.hs b/persistent/test/Database/Persist/PersistValueSpec.hs new file mode 100644 index 000000000..a8ded1d27 --- /dev/null +++ b/persistent/test/Database/Persist/PersistValueSpec.hs @@ -0,0 +1,42 @@ +module Database.Persist.PersistValueSpec where + +import Test.Hspec +import Database.Persist.PersistValue +import Data.List.NonEmpty (NonEmpty(..), (<|)) +import qualified Data.Text as T +import Test.Hspec +import Test.Hspec.QuickCheck +import Test.QuickCheck +import 
Data.Aeson +import qualified Data.ByteString.Char8 as BS8 + + +spec :: Spec +spec = describe "PersistValueSpec" $ do + describe "PersistValue" $ do + describe "Aeson" $ do + let + testPrefix constr prefixChar bytes = + takePrefix (toJSON (constr (BS8.pack bytes))) + === + String (T.singleton prefixChar) + roundTrip constr bytes = + fromJSON (toJSON (constr (BS8.pack bytes))) + === + Data.Aeson.Success (constr (BS8.pack bytes)) + subject constr prefixChar = do + prop ("encodes with a " ++ [prefixChar] ++ " prefix") $ + testPrefix constr prefixChar + prop "Round Trips" $ + roundTrip constr + + describe "PersistDbSpecific" $ do + subject (PersistLiteral_ DbSpecific) 'p' + describe "PersistLiteral" $ do + subject PersistLiteral 'l' + describe "PersistLiteralEscaped" $ do + subject PersistLiteralEscaped 'e' + +takePrefix :: Value -> Value +takePrefix (String a) = String (T.take 1 a) +takePrefix a = a diff --git a/persistent/test/Database/Persist/QuasiSpec.hs b/persistent/test/Database/Persist/QuasiSpec.hs new file mode 100644 index 000000000..c0320cd41 --- /dev/null +++ b/persistent/test/Database/Persist/QuasiSpec.hs @@ -0,0 +1,901 @@ +{-# LANGUAGE CPP #-} +{-# LANGUAGE OverloadedLists #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} + +module Database.Persist.QuasiSpec where + +import Prelude hiding (lines) + +import Data.List hiding (lines) +import Data.List.NonEmpty (NonEmpty(..), (<|)) +import qualified Data.List.NonEmpty as NEL +import qualified Data.Map as Map +import qualified Data.Text as T +import Database.Persist.EntityDef.Internal +import Database.Persist.Quasi +import Database.Persist.Quasi.Internal +import Database.Persist.Types +import Test.Hspec +import Test.Hspec.QuickCheck +import Test.QuickCheck +import Text.Shakespeare.Text (st) + +spec :: Spec +spec = describe "Quasi" $ do + describe "splitExtras" $ do + let helloWorldTokens = Token "hello" :| [Token "world"] + foobarbazTokens = Token "foo" :| [Token "bar", Token "baz"] + it 
"works" $ do + splitExtras [] + `shouldBe` + mempty + it "works2" $ do + splitExtras + [ Line 0 helloWorldTokens + ] + `shouldBe` + ( [NEL.toList helloWorldTokens], mempty ) + it "works3" $ do + splitExtras + [ Line 0 helloWorldTokens + , Line 2 foobarbazTokens + ] + `shouldBe` + ( [NEL.toList helloWorldTokens, NEL.toList foobarbazTokens], mempty ) + it "works4" $ do + splitExtras + [ Line 0 [Token "Product"] + , Line 2 (Token <$> ["name", "Text"]) + , Line 2 (Token <$> ["added", "UTCTime", "default=CURRENT_TIMESTAMP"]) + ] + `shouldBe` + ( [] + , Map.fromList + [ ("Product", + [ ["name", "Text"] + , ["added", "UTCTime", "default=CURRENT_TIMESTAMP"] + ] + ) ] + ) + it "works5" $ do + splitExtras + [ Line 0 [Token "Product"] + , Line 2 (Token <$> ["name", "Text"]) + , Line 4 [Token "ExtraBlock"] + , Line 4 (Token <$> ["added", "UTCTime", "default=CURRENT_TIMESTAMP"]) + ] + `shouldBe` + ( [] + , Map.fromList + [ ("Product", + [ ["name", "Text"] + , ["ExtraBlock"] + , ["added", "UTCTime", "default=CURRENT_TIMESTAMP"] + ] + )] + ) + describe "takeColsEx" $ do + let subject = takeColsEx upperCaseSettings + it "fails on a single word" $ do + subject ["asdf"] + `shouldBe` + Nothing + it "works if it has a name and a type" $ do + subject ["asdf", "Int"] + `shouldBe` + Just UnboundFieldDef + { unboundFieldNameHS = FieldNameHS "asdf" + , unboundFieldNameDB = FieldNameDB "asdf" + , unboundFieldType = FTTypeCon Nothing "Int" + , unboundFieldAttrs = [] + , unboundFieldStrict = True + , unboundFieldCascade = noCascade + , unboundFieldComments = Nothing + , unboundFieldGenerated = Nothing + } + it "works if it has a name, type, and cascade" $ do + subject ["asdf", "Int", "OnDeleteCascade", "OnUpdateCascade"] + `shouldBe` + Just UnboundFieldDef + { unboundFieldNameHS = FieldNameHS "asdf" + , unboundFieldNameDB = FieldNameDB "asdf" + , unboundFieldType = FTTypeCon Nothing "Int" + , unboundFieldAttrs = [] + , unboundFieldStrict = True + , unboundFieldCascade = FieldCascade (Just 
Cascade) (Just Cascade) + , unboundFieldComments = Nothing + , unboundFieldGenerated = Nothing + } + it "never tries to make a refernece" $ do + subject ["asdf", "UserId", "OnDeleteCascade"] + `shouldBe` + Just UnboundFieldDef + { unboundFieldNameHS = FieldNameHS "asdf" + , unboundFieldNameDB = FieldNameDB "asdf" + , unboundFieldType = FTTypeCon Nothing "UserId" + , unboundFieldAttrs = [] + , unboundFieldStrict = True + , unboundFieldCascade = FieldCascade Nothing (Just Cascade) + , unboundFieldComments = Nothing + , unboundFieldGenerated = Nothing + } + + describe "parseLine" $ do + it "returns nothing when line is just whitespace" $ + parseLine " " `shouldBe` Nothing + + it "handles normal words" $ + parseLine " foo bar baz" `shouldBe` + Just + ( Line 1 + [ Token "foo" + , Token "bar" + , Token "baz" + ] + ) + + it "handles quotes" $ + parseLine " \"foo bar\" \"baz\"" `shouldBe` + Just + ( Line 2 + [ Token "foo bar" + , Token "baz" + ] + ) + + it "handles quotes mid-token" $ + parseLine " x=\"foo bar\" \"baz\"" `shouldBe` + Just + ( Line 2 + [ Token "x=foo bar" + , Token "baz" + ] + ) + + it "handles escaped quote mid-token" $ + parseLine " x=\\\"foo bar\" \"baz\"" `shouldBe` + Just + ( Line 2 + [ Token "x=\\\"foo" + , Token "bar\"" + , Token "baz" + ] + ) + + it "handles unnested parantheses" $ + parseLine " (foo bar) (baz)" `shouldBe` + Just + ( Line 2 + [ Token "foo bar" + , Token "baz" + ] + ) + + it "handles unnested parantheses mid-token" $ + parseLine " x=(foo bar) (baz)" `shouldBe` + Just + ( Line 2 + [ Token "x=foo bar" + , Token "baz" + ] + ) + + it "handles nested parantheses" $ + parseLine " (foo (bar)) (baz)" `shouldBe` + Just + ( Line 2 + [ Token "foo (bar)" + , Token "baz" + ] + ) + + it "escaping" $ + parseLine " (foo \\(bar) y=\"baz\\\"\"" `shouldBe` + Just + ( Line 2 + [ Token "foo (bar" + , Token "y=baz\"" + ] + ) + + it "mid-token quote in later token" $ + parseLine "foo bar baz=(bin\")" `shouldBe` + Just + ( Line 0 + [ Token "foo" + , Token 
"bar" + , Token "baz=bin\"" + ] + ) + + describe "comments" $ do + it "recognizes one line" $ do + parseLine "-- | this is a comment" `shouldBe` + Just + ( Line 0 + [ DocComment "this is a comment" + ] + ) + + it "works if comment is indented" $ do + parseLine " -- | comment" `shouldBe` + Just (Line 2 [DocComment "comment"]) + + describe "parse" $ do + let subject = + [st| +Bicycle -- | this is a bike + brand String -- | the brand of the bike + ExtraBike + foo bar -- | this is a foo bar + baz + deriving Eq +-- | This is a Car +Car + -- | the make of the Car + make String + -- | the model of the Car + model String + UniqueModel model + deriving Eq Show ++Vehicle + bicycle BicycleId -- | the bike reference + car CarId -- | the car reference + + |] + let [bicycle, car, vehicle] = parse lowerCaseSettings subject + + it "should parse the `entityHaskell` field" $ do + getUnboundEntityNameHS bicycle `shouldBe` EntityNameHS "Bicycle" + getUnboundEntityNameHS car `shouldBe` EntityNameHS "Car" + getUnboundEntityNameHS vehicle `shouldBe` EntityNameHS "Vehicle" + + it "should parse the `entityDB` field" $ do + entityDB (unboundEntityDef bicycle) `shouldBe` EntityNameDB "bicycle" + entityDB (unboundEntityDef car) `shouldBe` EntityNameDB "car" + entityDB (unboundEntityDef vehicle) `shouldBe` EntityNameDB "vehicle" + + it "should parse the `entityAttrs` field" $ do + entityAttrs (unboundEntityDef bicycle) `shouldBe` ["-- | this is a bike"] + entityAttrs (unboundEntityDef car) `shouldBe` [] + entityAttrs (unboundEntityDef vehicle) `shouldBe` [] + + it "should parse the `unboundEntityFields` field" $ do + let simplifyField field = + (unboundFieldNameHS field, unboundFieldNameDB field, unboundFieldComments field) + (simplifyField <$> unboundEntityFields bicycle) `shouldBe` + [ (FieldNameHS "brand", FieldNameDB "brand", Nothing) + ] + (simplifyField <$> unboundEntityFields car) `shouldBe` + [ (FieldNameHS "make", FieldNameDB "make", Just "the make of the Car\n") + , (FieldNameHS 
"model", FieldNameDB "model", Just "the model of the Car\n") + ] + (simplifyField <$> unboundEntityFields vehicle) `shouldBe` + [ (FieldNameHS "bicycle", FieldNameDB "bicycle", Nothing) + , (FieldNameHS "car", FieldNameDB "car", Nothing) + ] + + it "should parse the `entityUniques` field" $ do + let simplifyUnique unique = + (uniqueHaskell unique, uniqueFields unique) + (simplifyUnique <$> entityUniques (unboundEntityDef bicycle)) `shouldBe` [] + (simplifyUnique <$> entityUniques (unboundEntityDef car)) `shouldBe` + [ (ConstraintNameHS "UniqueModel", [(FieldNameHS "model", FieldNameDB "model")]) + ] + (simplifyUnique <$> entityUniques (unboundEntityDef vehicle)) `shouldBe` [] + + it "should parse the `entityForeigns` field" $ do + let [user, notification] = parse lowerCaseSettings [st| +User + name Text + emailFirst Text + emailSecond Text + + UniqueEmail emailFirst emailSecond + +Notification + content Text + sentToFirst Text + sentToSecond Text + + Foreign User fk_noti_user sentToFirst sentToSecond References emailFirst emailSecond +|] + unboundForeignDefs user `shouldBe` [] + map unboundForeignDef (unboundForeignDefs notification) `shouldBe` + [ ForeignDef + { foreignRefTableHaskell = EntityNameHS "User" + , foreignRefTableDBName = EntityNameDB "user" + , foreignConstraintNameHaskell = ConstraintNameHS "fk_noti_user" + , foreignConstraintNameDBName = ConstraintNameDB "notificationfk_noti_user" + , foreignFieldCascade = FieldCascade Nothing Nothing + , foreignFields = + [] + -- the foreign fields are not set yet in an unbound + -- entity def + , foreignAttrs = [] + , foreignNullable = False + , foreignToPrimary = False + } + ] + + it "should parse the `entityDerives` field" $ do + entityDerives (unboundEntityDef bicycle) `shouldBe` ["Eq"] + entityDerives (unboundEntityDef car) `shouldBe` ["Eq", "Show"] + entityDerives (unboundEntityDef vehicle) `shouldBe` [] + + it "should parse the `entityEntities` field" $ do + entityExtra (unboundEntityDef bicycle) `shouldBe` 
Map.singleton "ExtraBike" [["foo", "bar", "-- | this is a foo bar"], ["baz"]] + entityExtra (unboundEntityDef car) `shouldBe` mempty + entityExtra (unboundEntityDef vehicle) `shouldBe` mempty + + it "should parse the `entitySum` field" $ do + entitySum (unboundEntityDef bicycle) `shouldBe` False + entitySum (unboundEntityDef car) `shouldBe` False + entitySum (unboundEntityDef vehicle) `shouldBe` True + + it "should parse the `entityComments` field" $ do + entityComments (unboundEntityDef bicycle) `shouldBe` Nothing + entityComments (unboundEntityDef car) `shouldBe` Just "This is a Car\n" + entityComments (unboundEntityDef vehicle) `shouldBe` Nothing + + describe "foreign keys" $ do + let definitions = [st| +User + name Text + emailFirst Text + emailSecond Text + + UniqueEmail emailFirst emailSecond + +Notification + content Text + sentToFirst Text + sentToSecond Text + + Foreign User fk_noti_user sentToFirst sentToSecond References emailFirst emailSecond +|] + + it "should allow you to modify the FK name via provided function" $ do + let + flippedFK (EntityNameHS entName) (ConstraintNameHS conName) = + conName <> entName + [_user, notification] = + parse (setPsToFKName flippedFK lowerCaseSettings) definitions + [notificationForeignDef] = + unboundForeignDef <$> unboundForeignDefs notification + foreignConstraintNameDBName notificationForeignDef + `shouldBe` + ConstraintNameDB "fk_noti_user_notification" + + it "should allow you to enable snake cased foriegn keys via a preset configuration function" $ do + let + [_user, notification] = + parse (setPsUseSnakeCaseForiegnKeys lowerCaseSettings) definitions + [notificationForeignDef] = + unboundForeignDef <$> unboundForeignDefs notification + foreignConstraintNameDBName notificationForeignDef + `shouldBe` + ConstraintNameDB "notification_fk_noti_user" + + describe "ticked types" $ do + it "should be able to parse ticked types" $ do + let simplifyField field = + (unboundFieldNameHS field, unboundFieldType field) + let 
tickedDefinition = [st| +CustomerTransfer + customerId CustomerId + moneyAmount (MoneyAmount 'Customer 'Debit) + currencyCode CurrencyCode + uuid TransferUuid +|] + let [customerTransfer] = parse lowerCaseSettings tickedDefinition + let expectedType = + FTTypeCon Nothing "MoneyAmount" `FTApp` FTTypePromoted "Customer" `FTApp` FTTypePromoted "Debit" + + (simplifyField <$> unboundEntityFields customerTransfer) `shouldBe` + [ (FieldNameHS "customerId", FTTypeCon Nothing "CustomerId") + , (FieldNameHS "moneyAmount", expectedType) + , (FieldNameHS "currencyCode", FTTypeCon Nothing "CurrencyCode") + , (FieldNameHS "uuid", FTTypeCon Nothing "TransferUuid") + ] + + describe "parseFieldType" $ do + it "simple types" $ + parseFieldType "FooBar" `shouldBe` Right (FTTypeCon Nothing "FooBar") + it "module types" $ + parseFieldType "Data.Map.FooBar" `shouldBe` Right (FTTypeCon (Just "Data.Map") "FooBar") + it "application" $ + parseFieldType "Foo Bar" `shouldBe` Right ( + FTTypeCon Nothing "Foo" `FTApp` FTTypeCon Nothing "Bar") + it "application multiple" $ + parseFieldType "Foo Bar Baz" `shouldBe` Right ( + (FTTypeCon Nothing "Foo" `FTApp` FTTypeCon Nothing "Bar") + `FTApp` FTTypeCon Nothing "Baz" + ) + it "parens" $ do + let foo = FTTypeCon Nothing "Foo" + bar = FTTypeCon Nothing "Bar" + baz = FTTypeCon Nothing "Baz" + parseFieldType "Foo (Bar Baz)" `shouldBe` Right ( + foo `FTApp` (bar `FTApp` baz)) + it "lists" $ do + let foo = FTTypeCon Nothing "Foo" + bar = FTTypeCon Nothing "Bar" + bars = FTList bar + baz = FTTypeCon Nothing "Baz" + parseFieldType "Foo [Bar] Baz" `shouldBe` Right ( + foo `FTApp` bars `FTApp` baz) + it "fails on lowercase starts" $ do + parseFieldType "nothanks" `shouldBe` Left "PSFail ('n',\"othanks\")" + + describe "#1175 empty entity" $ do + let subject = + [st| +Foo + name String + age Int + +EmptyEntity + +Bar + name String + +Baz + a Int + b String + c FooId + |] + + let preparsed = + preparse subject + it "preparse works" $ do + (length <$> 
preparsed) `shouldBe` Just 10 + + let fooLines = + [ Line + { lineIndent = 0 + , tokens = Token "Foo" :| [] + } + , Line + { lineIndent = 4 + , tokens = Token "name" :| [Token "String"] + } + , Line + { lineIndent = 4 + , tokens = Token "age" :| [Token "Int"] + } + ] + emptyLines = + [ Line + { lineIndent = 0 + , tokens = Token "EmptyEntity" :| [] + } + ] + barLines = + [ Line + { lineIndent = 0 + , tokens = Token "Bar" :| [] + } + , Line + { lineIndent = 4 + , tokens = Token "name" :| [Token "String"] + } + ] + bazLines = + [ Line + { lineIndent = 0 + , tokens = Token "Baz" :| [] + } + , Line + { lineIndent = 4 + , tokens = Token "a" :| [Token "Int"] + } + , Line + { lineIndent = 4 + , tokens = Token "b" :| [Token "String"] + } + , Line + { lineIndent = 4 + , tokens = Token "c" :| [Token "FooId"] + } + ] + + let + linesAssociated = + case preparsed of + Nothing -> error "preparsed failed" + Just lines -> associateLines lines + it "associateLines works" $ do + linesAssociated `shouldMatchList` + [ LinesWithComments + { lwcLines = NEL.fromList fooLines + , lwcComments = [] + } + , LinesWithComments (NEL.fromList emptyLines) [] + , LinesWithComments (NEL.fromList barLines) [] + , LinesWithComments (NEL.fromList bazLines) [] + ] + + it "parse works" $ do + let test name'fieldCount parsedList = do + case (name'fieldCount, parsedList) of + ([], []) -> + pure () + ((name, fieldCount) : _, []) -> + expectationFailure + $ "Expected an entity with name " + <> name + <> " and " <> show fieldCount <> " fields" + <> ", but the list was empty..." 
+ + ((name, fieldCount) : ys, (x : xs)) -> do + let + UnboundEntityDef {..} = + x + (unEntityNameHS (getUnboundEntityNameHS x), length unboundEntityFields) + `shouldBe` + (T.pack name, fieldCount) + test ys xs + ([], _:_) -> + expectationFailure + "more entities parsed than expected" + + result = + parse lowerCaseSettings subject + length result `shouldBe` 4 + + test + [ ("Foo", 2) + , ("EmptyEntity", 0) + , ("Bar", 1) + , ("Baz", 3) + ] + result + + + describe "preparse" $ do + prop "omits lines that are only whitespace" $ \len -> do + ws <- vectorOf len arbitraryWhiteSpaceChar + pure $ preparse (T.pack ws) === Nothing + + it "recognizes entity" $ do + let expected = + Line { lineIndent = 0, tokens = pure (Token "Person") } :| + [ Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } + , Line { lineIndent = 2, tokens = Token "age" :| [Token "Int"] } + ] + preparse "Person\n name String\n age Int" `shouldBe` Just expected + + it "recognizes comments" $ do + let text = "Foo\n x X\n-- | Hello\nBar\n name String" + let expected = + Line { lineIndent = 0, tokens = pure (Token "Foo") } :| + [ Line { lineIndent = 2, tokens = Token "x" :| [Token "X"] } + , Line { lineIndent = 0, tokens = pure (DocComment "Hello") } + , Line { lineIndent = 0, tokens = pure (Token "Bar") } + , Line { lineIndent = 1, tokens = Token "name" :| [Token "String"] } + ] + preparse text `shouldBe` Just expected + + it "preparse indented" $ do + let t = T.unlines + [ " Foo" + , " x X" + , " -- | Comment" + , " -- hidden comment" + , " Bar" + , " name String" + ] + expected = + Line { lineIndent = 2, tokens = pure (Token "Foo") } :| + [ Line { lineIndent = 4, tokens = Token "x" :| [Token "X"] } + , Line { lineIndent = 2, tokens = pure (DocComment "Comment") } + , Line { lineIndent = 2, tokens = pure (Token "Bar") } + , Line { lineIndent = 4, tokens = Token "name" :| [Token "String"] } + ] + preparse t `shouldBe` Just expected + + it "preparse extra blocks" $ do + let t = T.unlines + [ 
"LowerCaseTable" + , " name String" + , " ExtraBlock" + , " foo bar" + , " baz" + , " ExtraBlock2" + , " something" + ] + expected = + Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| + [ Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } + , Line { lineIndent = 2, tokens = pure (Token "ExtraBlock") } + , Line { lineIndent = 4, tokens = Token "foo" :| [Token "bar"] } + , Line { lineIndent = 4, tokens = pure (Token "baz") } + , Line { lineIndent = 2, tokens = pure (Token "ExtraBlock2") } + , Line { lineIndent = 4, tokens = pure (Token "something") } + ] + preparse t `shouldBe` Just expected + + it "field comments" $ do + let text = T.unlines + [ "-- | Model" + , "Foo" + , " -- | Field" + , " name String" + ] + expected = + Line { lineIndent = 0, tokens = [DocComment "Model"] } :| + [ Line { lineIndent = 0, tokens = [Token "Foo"] } + , Line { lineIndent = 2, tokens = [DocComment "Field"] } + , Line { lineIndent = 2, tokens = (Token <$> ["name", "String"]) } + ] + preparse text `shouldBe` Just expected + + describe "associateLines" $ do + let foo = + Line + { lineIndent = 0 + , tokens = pure (Token "Foo") + } + name'String = + Line + { lineIndent = 2 + , tokens = Token "name" :| [Token "String"] + } + comment = + Line + { lineIndent = 0 + , tokens = pure (DocComment "comment") + } + it "works" $ do + associateLines + ( comment :| + [ foo + , name'String + ]) + `shouldBe` + [ LinesWithComments + { lwcComments = ["comment"] + , lwcLines = foo :| [name'String] + } + ] + let bar = + Line + { lineIndent = 0 + , tokens = Token "Bar" :| [Token "sql", Token "=", Token "bars"] + } + age'Int = + Line + { lineIndent = 1 + , tokens = Token "age" :| [Token "Int"] + } + it "works when used consecutively" $ do + associateLines + ( bar :| + [ age'Int + , comment + , foo + , name'String + ]) + `shouldBe` + [ LinesWithComments + { lwcComments = [] + , lwcLines = bar :| [age'Int] + } + , LinesWithComments + { lwcComments = ["comment"] + , lwcLines = foo 
:| [name'String] + } + ] + it "works with textual input" $ do + let text = preparse "Foo\n x X\n-- | Hello\nBar\n name String" + associateLines <$> text + `shouldBe` Just + [ LinesWithComments + { lwcLines = + Line {lineIndent = 0, tokens = Token "Foo" :| []} + :| [ Line {lineIndent = 2, tokens = Token "x" :| [Token "X"]} ] + , lwcComments = + [] + } + , LinesWithComments + { lwcLines = + Line {lineIndent = 0, tokens = Token "Bar" :| []} + :| [ Line {lineIndent = 1, tokens = Token "name" :| [Token "String"]}] + , lwcComments = + ["Hello"] + } + ] + it "works with extra blocks" $ do + let text = preparse . T.unlines $ + [ "LowerCaseTable" + , " Id sql=my_id" + , " fullName Text" + , " ExtraBlock" + , " foo bar" + , " baz" + , " bin" + , " ExtraBlock2" + , " something" + ] + associateLines <$> text `shouldBe` Just + [ LinesWithComments + { lwcLines = + Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| + [ Line { lineIndent = 4, tokens = Token "Id" :| [Token "sql=my_id"] } + , Line { lineIndent = 4, tokens = Token "fullName" :| [Token "Text"] } + , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock") } + , Line { lineIndent = 8, tokens = Token "foo" :| [Token "bar"] } + , Line { lineIndent = 8, tokens = pure (Token "baz") } + , Line { lineIndent = 8, tokens = pure (Token "bin") } + , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock2") } + , Line { lineIndent = 8, tokens = pure (Token "something") } + ] + , lwcComments = [] + } + ] + + it "works with extra blocks twice" $ do + let text = preparse . 
T.unlines $ + [ "IdTable" + , " Id Day default=CURRENT_DATE" + , " name Text" + , "" + , "LowerCaseTable" + , " Id sql=my_id" + , " fullName Text" + , " ExtraBlock" + , " foo bar" + , " baz" + , " bin" + , " ExtraBlock2" + , " something" + ] + associateLines <$> text `shouldBe` Just + [ LinesWithComments + { lwcLines = Line 0 (pure (Token "IdTable")) :| + [ Line 4 (Token "Id" <| Token "Day" :| [Token "default=CURRENT_DATE"]) + , Line 4 (Token "name" :| [Token "Text"]) + ] + , lwcComments = [] + } + , LinesWithComments + { lwcLines = + Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| + [ Line { lineIndent = 4, tokens = Token "Id" :| [Token "sql=my_id"] } + , Line { lineIndent = 4, tokens = Token "fullName" :| [Token "Text"] } + , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock") } + , Line { lineIndent = 8, tokens = Token "foo" :| [Token "bar"] } + , Line { lineIndent = 8, tokens = pure (Token "baz") } + , Line { lineIndent = 8, tokens = pure (Token "bin") } + , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock2") } + , Line { lineIndent = 8, tokens = pure (Token "something") } + ] + , lwcComments = [] + } + ] + + + it "works with field comments" $ do + let text = preparse . 
T.unlines $ + [ "-- | Model" + , "Foo" + , " -- | Field" + , " name String" + ] + associateLines <$> text `shouldBe` Just + [ LinesWithComments + { lwcLines = + Line { lineIndent = 0, tokens = (Token "Foo") :| [] } :| + [ Line { lineIndent = 2, tokens = pure (DocComment "Field") } + , Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } + ] + , lwcComments = + ["Model"] + } + ] + + + + describe "parseLines" $ do + let lines = + T.unlines + [ "-- | Comment" + , "Foo" + , " -- | Field" + , " name String" + , " age Int" + , " Extra" + , " foo bar" + , " baz" + , " Extra2" + , " something" + ] + let [subject] = parse lowerCaseSettings lines + it "produces the right name" $ do + getUnboundEntityNameHS subject `shouldBe` EntityNameHS "Foo" + describe "unboundEntityFields" $ do + let fields = unboundEntityFields subject + it "has the right field names" $ do + map unboundFieldNameHS fields `shouldMatchList` + [ FieldNameHS "name" + , FieldNameHS "age" + ] + it "has comments" $ do + map unboundFieldComments fields `shouldBe` + [ Just "Field\n" + , Nothing + ] + it "has the comments" $ do + entityComments (unboundEntityDef subject) `shouldBe` + Just "Comment\n" + it "combines extrablocks" $ do + entityExtra (unboundEntityDef subject) `shouldBe` Map.fromList + [ ("Extra", [["foo", "bar"], ["baz"]]) + , ("Extra2", [["something"]]) + ] + describe "works with extra blocks" $ do + let [_, lowerCaseTable, idTable] = + case parse lowerCaseSettings $ T.unlines + [ "" + , "IdTable" + , " Id Day default=CURRENT_DATE" + , " name Text" + , "" + , "LowerCaseTable" + , " Id sql=my_id" + , " fullName Text" + , " ExtraBlock" + , " foo bar" + , " baz" + , " bin" + , " ExtraBlock2" + , " something" + , "" + , "IdTable" + , " Id Day default=CURRENT_DATE" + , " name Text" + , "" + ] of + [a, b, c] -> + [a, b, c] :: [UnboundEntityDef] + xs -> + error + $ "Expected 3 elements in list, got: " + <> show (length xs) + <> ", list contents: \n\n" <> intercalate "\n" (map show xs) + 
describe "idTable" $ do + let UnboundEntityDef { unboundEntityDef = EntityDef {..}, .. } = idTable + it "has no extra blocks" $ do + entityExtra `shouldBe` mempty + it "has the right name" $ do + entityHaskell `shouldBe` EntityNameHS "IdTable" + it "has the right fields" $ do + map unboundFieldNameHS unboundEntityFields `shouldMatchList` + [ FieldNameHS "name" + ] + describe "lowerCaseTable" $ do + let UnboundEntityDef { unboundEntityDef = EntityDef {..}, ..} = lowerCaseTable + it "has the right name" $ do + entityHaskell `shouldBe` EntityNameHS "LowerCaseTable" + it "has the right fields" $ do + map unboundFieldNameHS unboundEntityFields `shouldMatchList` + [ FieldNameHS "fullName" + ] + it "has ExtraBlock" $ do + Map.lookup "ExtraBlock" entityExtra + `shouldBe` Just + [ ["foo", "bar"] + , ["baz"] + , ["bin"] + ] + it "has ExtraBlock2" $ do + Map.lookup "ExtraBlock2" entityExtra + `shouldBe` Just + [ ["something"] + ] + +arbitraryWhiteSpaceChar :: Gen Char +arbitraryWhiteSpaceChar = + oneof $ pure <$> [' ', '\t', '\n', '\r'] diff --git a/persistent/test/Database/Persist/TH/DiscoverEntitiesSpec.hs b/persistent/test/Database/Persist/TH/DiscoverEntitiesSpec.hs new file mode 100644 index 000000000..d8eef9b3e --- /dev/null +++ b/persistent/test/Database/Persist/TH/DiscoverEntitiesSpec.hs @@ -0,0 +1,60 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.DiscoverEntitiesSpec where + +import TemplateTestImports + +import Data.Aeson + +import Data.Text (Text) + +import Language.Haskell.TH.Syntax + +import 
Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) + +mkPersist sqlSettings [persistLowerCase| + +User + name String + age Int + +Dog + user UserId + name String + +Cat + enemy DogId + name String + +|] + +pass :: IO () +pass = pure () + +asIO :: IO a -> IO a +asIO = id + +$(pure []) + +spec :: Spec +spec = describe "DiscoverEntitiesSpec" $ do + let entities = $(discoverEntities) + it "should have all three entities" $ do + entities `shouldMatchList` + [ entityDef $ Proxy @User + , entityDef $ Proxy @Dog + , entityDef $ Proxy @Cat + ] diff --git a/persistent/test/Database/Persist/TH/EmbedSpec.hs b/persistent/test/Database/Persist/TH/EmbedSpec.hs new file mode 100644 index 000000000..7b9b6dcaf --- /dev/null +++ b/persistent/test/Database/Persist/TH/EmbedSpec.hs @@ -0,0 +1,169 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.EmbedSpec where + +import TemplateTestImports + +import Data.Text (Text) +import qualified Data.Map as M +import qualified Data.Text as T + +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) +import Database.Persist.Types +import Database.Persist.Types +import Database.Persist.EntityDef +import Database.Persist.EntityDef.Internal (toEmbedEntityDef) + +mkPersist sqlSettings [persistLowerCase| + +Thing + name String + foo String MigrationOnly + + deriving Eq Show + +EmbedThing + someThing Thing + + deriving Eq Show + +SelfEmbed + name Text + self SelfEmbed Maybe + deriving Eq Show + 
+MutualEmbed + thing MutualTarget + +MutualTarget + thing [MutualEmbed] + +ModelWithList + names [Text] + +HasMap + map (M.Map T.Text T.Text) + deriving Show Eq Read Ord + +MapIdValue + map (M.Map T.Text (Key Thing)) + deriving Show Eq Read Ord + +|] + +pass :: IO () +pass = pure () + +asIO :: IO a -> IO a +asIO = id + +spec :: Spec +spec = describe "EmbedSpec" $ do + describe "ModelWithList" $ do + let + edef = + entityDef $ Proxy @ModelWithList + [fieldDef] = + getEntityFields edef + it "has the right type" $ do + fieldType fieldDef + `shouldBe` + FTList (FTTypeCon Nothing "Text") + it "has the right sqltype" $ do + fieldSqlType fieldDef + `shouldBe` + SqlString + describe "MapIdValue" $ do + let + edef = + entityDef $ Proxy @MapIdValue + [fieldDef] = + getEntityFields edef + it "has the right type" $ do + fieldType fieldDef + `shouldBe` + ( FTTypeCon (Just "M") "Map" + `FTApp` + FTTypeCon (Just "T") "Text" + `FTApp` + (FTTypeCon Nothing "Key" + `FTApp` + FTTypeCon Nothing "Thing" + ) + ) + it "has the right sqltype" $ do + fieldSqlType fieldDef + `shouldBe` + SqlString + describe "HasMap" $ do + let + edef = + entityDef $ Proxy @HasMap + [fieldDef] = + getEntityFields edef + it "has the right type" $ do + fieldType fieldDef + `shouldBe` + ( FTTypeCon (Just "M") "Map" + `FTApp` + FTTypeCon (Just "T") "Text" + `FTApp` + FTTypeCon (Just "T") "Text" + ) + it "has the right sqltype" $ do + fieldSqlType fieldDef + `shouldBe` + SqlString + + describe "SomeThing" $ do + let + edef = + entityDef $ Proxy @Thing + describe "toEmbedEntityDef" $ do + let + embedDef = + toEmbedEntityDef edef + it "should have the same field count as Haskell fields" $ do + length (embeddedFields embedDef) + `shouldBe` + length (getEntityFields edef) + + describe "EmbedThing" $ do + it "generates the right constructor" $ do + let embedThing :: EmbedThing + embedThing = EmbedThing (Thing "asdf") + pass + + describe "SelfEmbed" $ do + let + edef = + entityDef $ Proxy @SelfEmbed + describe 
"fieldReference" $ do + let + [nameField, selfField] = getEntityFields edef + it "has self reference" $ do + fieldReference selfField + `shouldBe` + NoReference + describe "toEmbedEntityDef" $ do + let + embedDef = + toEmbedEntityDef edef + it "has the same field count as regular def" $ do + length (getEntityFields edef) + `shouldBe` + length (embeddedFields embedDef) + diff --git a/persistent/test/Database/Persist/TH/ForeignRefSpec.hs b/persistent/test/Database/Persist/TH/ForeignRefSpec.hs new file mode 100644 index 000000000..b4e694e57 --- /dev/null +++ b/persistent/test/Database/Persist/TH/ForeignRefSpec.hs @@ -0,0 +1,178 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +-- +-- DeriveAnyClass is not actually used by persistent-template +-- But a long standing bug was that if it was enabled, it was used to derive instead of GeneralizedNewtypeDeriving +-- This was fixed by using DerivingStrategies to specify newtype deriving should be used. +-- This pragma is left here as a "test" that deriving works when DeriveAnyClass is enabled. 
+-- See https://github.com/yesodweb/persistent/issues/578 +{-# LANGUAGE DeriveAnyClass #-} + +module Database.Persist.TH.ForeignRefSpec where + +import Control.Applicative (Const(..)) +import Data.Aeson +import Data.ByteString.Lazy.Char8 () +import Data.Coerce +import Data.Functor.Identity (Identity(..)) +import Data.Int +import qualified Data.List as List +import Data.Proxy +import Data.Text (Text, pack) +import GHC.Generics (Generic) +import Test.Hspec +import Test.Hspec.QuickCheck +import Test.QuickCheck.Arbitrary +import Test.QuickCheck.Gen (Gen) + +import Database.Persist +import Database.Persist.EntityDef.Internal +import Database.Persist.Sql +import Database.Persist.Sql.Util +import Database.Persist.TH +import TemplateTestImports + +mkPersist sqlSettings [persistLowerCase| + +HasCustomName sql=custom_name + name Text + +ForeignTarget + name Text + deriving Eq Show + +ForeignSource + name Text + foreignTargetId ForeignTargetId + Foreign ForeignTarget fk_s_t foreignTargetId + +ForeignPrimary + name Text + Primary name + deriving Eq Show + +ForeignPrimarySource + name Text + Foreign ForeignPrimary fk_name_target name + +NullableRef + name Text Maybe + Foreign ForeignPrimary fk_nullable_ref name + +ParentImplicit + name Text + +ChildImplicit + name Text + parent ParentImplicitId OnDeleteCascade OnUpdateCascade + +ParentExplicit + name Text + Primary name + +ChildExplicit + name Text + Foreign ParentExplicit OnDeleteCascade OnUpdateCascade fkparent name +|] + +spec :: Spec +spec = describe "ForeignRefSpec" $ do + describe "HasCustomName" $ do + let + edef = + entityDef $ Proxy @HasCustomName + it "should have a custom db name" $ do + entityDB edef + `shouldBe` + EntityNameDB "custom_name" + + it "should compile" $ do + True `shouldBe` True + + describe "ForeignPrimarySource" $ do + let + fpsDef = + entityDef $ Proxy @ForeignPrimarySource + [foreignDef] = + entityForeigns fpsDef + it "has the right type" $ do + foreignPrimarySourceFk_name_target 
(ForeignPrimarySource "asdf") + `shouldBe` + ForeignPrimaryKey "asdf" + + describe "Cascade" $ do + describe "Explicit" $ do + let + parentDef = + entityDef $ Proxy @ParentExplicit + childDef = + entityDef $ Proxy @ChildExplicit + childForeigns = + entityForeigns childDef + it "should have a single foreign reference defined" $ do + case entityForeigns childDef of + [a] -> + pure () + as -> + expectationFailure . mconcat $ + [ "Expected one foreign reference on childDef, " + , "got: " + , show as + ] + let + [ForeignDef {..}] = + childForeigns + + describe "ChildExplicit" $ do + it "should have the right target table" $ do + foreignRefTableHaskell `shouldBe` + EntityNameHS "ParentExplicit" + foreignRefTableDBName `shouldBe` + EntityNameDB "parent_explicit" + it "should have the right cascade behavior" $ do + foreignFieldCascade + `shouldBe` + FieldCascade + { fcOnUpdate = + Just Cascade + , fcOnDelete = + Just Cascade + } + it "is not nullable" $ do + foreignNullable `shouldBe` False + it "is to the Primary key" $ do + foreignToPrimary `shouldBe` True + + describe "Implicit" $ do + let + parentDef = + entityDef $ Proxy @ParentImplicit + childDef = + entityDef $ Proxy @ChildImplicit + childFields = + entityFields childDef + describe "ChildImplicit" $ do + case childFields of + [nameField, parentIdField] -> do + it "parentId has reference" $ do + fieldReference parentIdField `shouldBe` + ForeignRef (EntityNameHS "ParentImplicit") + as -> + error . 
mconcat $ + [ "Expected one foreign reference on childDef, " + , "got: " + , show as + ] diff --git a/persistent/test/Database/Persist/TH/ImplicitIdColSpec.hs b/persistent/test/Database/Persist/TH/ImplicitIdColSpec.hs new file mode 100644 index 000000000..f1072a34e --- /dev/null +++ b/persistent/test/Database/Persist/TH/ImplicitIdColSpec.hs @@ -0,0 +1,64 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.ImplicitIdColSpec where + +import TemplateTestImports + +import Data.Text (Text) + +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) + +do + let + uuidDef = + mkImplicitIdDef @Text "uuid_generate_v1mc()" + settings = + setImplicitIdDef uuidDef sqlSettings + + mkPersist settings [persistLowerCase| + + User + name String + age Int + + |] + +pass :: IO () +pass = pure () + +asIO :: IO a -> IO a +asIO = id + +spec :: Spec +spec = describe "ImplicitIdColSpec" $ do + describe "UserKey" $ do + it "has type Text -> Key User" $ do + let + userKey = UserKey "Hello" + _ = UserKey :: Text -> UserId + pass + + describe "getEntityId" $ do + let + EntityIdField idField = + getEntityId (entityDef (Nothing @User)) + it "has SqlString SqlType" $ asIO $ do + fieldSqlType idField `shouldBe` SqlString + it "has Text FieldType" $ asIO $ do + pendingWith "currently returns UserId, may not be an issue" + fieldType idField + `shouldBe` + fieldTypeFromTypeable @Text diff --git a/persistent/test/Database/Persist/TH/JsonEncodingSpec.hs 
b/persistent/test/Database/Persist/TH/JsonEncodingSpec.hs new file mode 100644 index 000000000..7753d8c97 --- /dev/null +++ b/persistent/test/Database/Persist/TH/JsonEncodingSpec.hs @@ -0,0 +1,133 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.JsonEncodingSpec where + +import TemplateTestImports + +import Data.Aeson +import qualified Data.HashMap.Lazy as M +import Data.Text (Text) +import Test.QuickCheck.Instances () +import Test.Hspec.QuickCheck +import Test.QuickCheck + +import Database.Persist.EntityDef +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) +import Database.Persist.Types + +mkPersist sqlSettings [persistLowerCase| +JsonEncoding json + name Text + age Int + Primary name + deriving Show Eq + +JsonEncoding2 json + name Text + age Int + blood Text + Primary name blood + deriving Show Eq + +JsonEncMigrationOnly json + name Text + age Int + foo Text MigrationOnly +|] + +instance Arbitrary JsonEncoding where + arbitrary = JsonEncoding <$> arbitrary <*> arbitrary + +instance Arbitrary JsonEncoding2 where + arbitrary = JsonEncoding2 <$> arbitrary <*> arbitrary <*> arbitrary + +pass :: IO () +pass = pure () + +asIO :: IO a -> IO a +asIO = id + +spec :: Spec +spec = describe "JsonEncodingSpec" $ do + let + subject = + JsonEncoding "Bob" 32 + subjectEntity = + Entity (JsonEncodingKey (jsonEncodingName subject)) subject + + it "encodes without an ID field" $ do + toJSON subjectEntity + 
`shouldBe` + Object (M.fromList + [ ("name", String "Bob") + , ("age", toJSON (32 :: Int)) + , ("id", String "Bob") + ]) + + it "decodes without an ID field" $ do + let + json_ = encode . Object . M.fromList $ + [ ("name", String "Bob") + , ("age", toJSON (32 :: Int)) + ] + eitherDecode json_ + `shouldBe` + Right subjectEntity + + prop "works with a Primary" $ \jsonEncoding -> do + let + ent = + Entity (JsonEncodingKey (jsonEncodingName jsonEncoding)) jsonEncoding + decode (encode ent) + `shouldBe` + Just ent + + prop "excuse me what" $ \j@JsonEncoding{..} -> do + let + ent = + Entity (JsonEncodingKey jsonEncodingName) j + toJSON ent + `shouldBe` + Object (M.fromList + [ ("name", toJSON jsonEncodingName) + , ("age", toJSON jsonEncodingAge) + , ("id", toJSON jsonEncodingName) + ]) + + prop "round trip works with composite key" $ \j@JsonEncoding2{..} -> do + let + key = JsonEncoding2Key jsonEncoding2Name jsonEncoding2Blood + ent = + Entity key j + decode (encode ent) + `shouldBe` + Just ent + + prop "works with a composite key" $ \j@JsonEncoding2{..} -> do + let + key = JsonEncoding2Key jsonEncoding2Name jsonEncoding2Blood + ent = + Entity key j + toJSON ent + `shouldBe` + Object (M.fromList + [ ("name", toJSON jsonEncoding2Name) + , ("age", toJSON jsonEncoding2Age) + , ("blood", toJSON jsonEncoding2Blood) + , ("id", toJSON key) + ]) diff --git a/persistent/test/Database/Persist/TH/MigrationOnlySpec.hs b/persistent/test/Database/Persist/TH/MigrationOnlySpec.hs new file mode 100644 index 000000000..bc1ff419f --- /dev/null +++ b/persistent/test/Database/Persist/TH/MigrationOnlySpec.hs @@ -0,0 +1,65 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# 
LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.MigrationOnlySpec where + +import TemplateTestImports + +import Data.Text (Text) + +import Database.Persist.ImplicitIdDef +import Database.Persist.ImplicitIdDef.Internal (fieldTypeFromTypeable) +import Database.Persist.Types + +mkPersist sqlSettings [persistLowerCase| + +HasMigrationOnly + name String + blargh Int MigrationOnly + + deriving Eq Show +|] + +pass :: IO () +pass = pure () + +asIO :: IO a -> IO a +asIO = id + +spec :: Spec +spec = describe "MigrationOnlySpec" $ do + describe "HasMigrationOnly" $ do + let + edef = + entityDef $ Proxy @HasMigrationOnly + describe "getEntityFields" $ do + it "has one field" $ do + length (getEntityFields edef) + `shouldBe` 1 + describe "getEntityFieldsDatabase" $ do + it "has two fields" $ do + length (getEntityFieldsDatabase edef) + `shouldBe` 2 + describe "toPersistFields" $ do + it "should have one field" $ do + map toPersistValue (toPersistFields (HasMigrationOnly "asdf")) + `shouldBe` + map toPersistValue [SomePersistField ("asdf" :: Text)] + describe "fromPersistValues" $ do + it "should work with only item in list" $ do + fromPersistValues [PersistText "Hello"] + `shouldBe` + Right (HasMigrationOnly "Hello") + + diff --git a/persistent/test/Database/Persist/TH/MultiBlockSpec.hs b/persistent/test/Database/Persist/TH/MultiBlockSpec.hs new file mode 100644 index 000000000..ba7207039 --- /dev/null +++ b/persistent/test/Database/Persist/TH/MultiBlockSpec.hs @@ -0,0 +1,79 @@ +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# 
LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.MultiBlockSpec where + +import TemplateTestImports + + +import Database.Persist.TH.MultiBlockSpec.Model + +share + [ mkPersistWith sqlSettings importDefList + ] + [persistLowerCase| + +Thing + name Text + Primary name + +ThingAuto + name Text + +MBBar + name Text + age Int + user UserId + thing ThingId + thingAuto ThingAutoId + profile MBDogId + + Foreign MBCompositePrimary bar_to_comp name age +|] + +spec :: Spec +spec = describe "MultiBlockSpec" $ do + describe "MBBar" $ do + let + edef = + entityDef $ Proxy @MBBar + describe "Foreign Key Works" $ do + let + [n, a, userRef, thingRef, thingAutoRef, profileRef] = + getEntityFields edef + it "User reference works" $ do + fieldReference userRef + `shouldBe` + ForeignRef + (EntityNameHS "User") + + it "Primary key reference works" $ do + fieldReference profileRef + `shouldBe` + ForeignRef + (EntityNameHS "MBDog") + + it "Thing ref works (same block)" $ do + fieldReference thingRef + `shouldBe` + ForeignRef + (EntityNameHS "Thing") + + it "ThingAuto ref works (same block)" $ do + fieldReference thingAutoRef + `shouldBe` + ForeignRef + (EntityNameHS "ThingAuto") diff --git a/persistent/test/Database/Persist/TH/MultiBlockSpec/Model.hs b/persistent/test/Database/Persist/TH/MultiBlockSpec/Model.hs new file mode 100644 index 000000000..21b571169 --- /dev/null +++ b/persistent/test/Database/Persist/TH/MultiBlockSpec/Model.hs @@ -0,0 +1,45 @@ +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE FlexibleContexts #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedLabels #-} +{-# LANGUAGE PartialTypeSignatures #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# 
LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} + +module Database.Persist.TH.MultiBlockSpec.Model where + +import TemplateTestImports + +share + [ mkPersist sqlSettings + , mkEntityDefList "importDefList" + ] + [persistLowerCase| + +User + name Text + age Int + + deriving Eq Show + +MBDog + name Text + Primary name + +MBCompositePrimary + name Text + age Int + + Primary name age + +|] + diff --git a/persistent/test/Database/Persist/TH/OverloadedLabelSpec.hs b/persistent/test/Database/Persist/TH/OverloadedLabelSpec.hs index c2a4b4411..314871c65 100644 --- a/persistent/test/Database/Persist/TH/OverloadedLabelSpec.hs +++ b/persistent/test/Database/Persist/TH/OverloadedLabelSpec.hs @@ -58,5 +58,5 @@ spec = describe "OverloadedLabels" $ do compiles -compiles :: Expectation -compiles = True `shouldBe` True +compiles :: IO () +compiles = pure () diff --git a/persistent/test/Database/Persist/TH/SharedPrimaryKeyImportedSpec.hs b/persistent/test/Database/Persist/TH/SharedPrimaryKeyImportedSpec.hs index 436ff3620..071069614 100644 --- a/persistent/test/Database/Persist/TH/SharedPrimaryKeyImportedSpec.hs +++ b/persistent/test/Database/Persist/TH/SharedPrimaryKeyImportedSpec.hs @@ -44,11 +44,16 @@ spec = describe "Shared Primary Keys Imported" $ do `shouldBe` sqlType (Proxy @ProfileId) - describe "entityId FieldDef" $ do + describe "getEntityId FieldDef" $ do it "should match underlying primary key" $ do - let getSqlType :: PersistEntity a => Proxy a -> SqlType - getSqlType = - fieldSqlType . entityId . 
entityDef + let + getSqlType :: PersistEntity a => Proxy a -> SqlType + getSqlType p = + case getEntityId (entityDef p) of + EntityIdField fd -> + fieldSqlType fd + _ -> + SqlOther "Composite Key" getSqlType (Proxy @User) `shouldBe` getSqlType (Proxy @Profile) diff --git a/persistent/test/Database/Persist/TH/SharedPrimaryKeySpec.hs b/persistent/test/Database/Persist/TH/SharedPrimaryKeySpec.hs index 6fcd39b1f..128bcd7d7 100644 --- a/persistent/test/Database/Persist/TH/SharedPrimaryKeySpec.hs +++ b/persistent/test/Database/Persist/TH/SharedPrimaryKeySpec.hs @@ -15,9 +15,11 @@ module Database.Persist.TH.SharedPrimaryKeySpec where import TemplateTestImports +import Data.Time import Data.Proxy import Test.Hspec import Database.Persist +import Database.Persist.EntityDef import Database.Persist.Sql import Database.Persist.Sql.Util import Database.Persist.TH @@ -27,31 +29,127 @@ share [ mkPersist sqlSettings ] [persistLowerCase| User name String --- TODO: uncomment this out https://github.com/yesodweb/persistent/issues/1149 --- Profile --- Id UserId --- email String - Profile + Id UserId + email String + +Profile2 Id (Key User) email String +DayKeyTable + Id Day + name Text + +RefDayKey + dayKey DayKeyTableId + |] spec :: Spec spec = describe "Shared Primary Keys" $ do + let + getSqlType :: PersistEntity a => Proxy a -> SqlType + getSqlType p = + case getEntityId (entityDef p) of + EntityIdField fd -> + fieldSqlType fd + _ -> + SqlOther "Composite Key" + + keyProxy :: Proxy a -> Proxy (Key a) + keyProxy _ = Proxy + + sqlTypeEquivalent + :: (PersistFieldSql (Key a), PersistEntity a) + => Proxy a + -> Expectation + sqlTypeEquivalent proxy = + sqlType (keyProxy proxy) `shouldBe` getSqlType proxy + testSqlTypeEquivalent + :: (PersistFieldSql (Key a), PersistEntity a) + => Proxy a + -> Spec + testSqlTypeEquivalent prxy = + it "has equivalent SqlType from sqlType and entityId" $ + sqlTypeEquivalent prxy describe "PersistFieldSql" $ do it "should match underlying key" $ do 
sqlType (Proxy @UserId) `shouldBe` sqlType (Proxy @ProfileId) - describe "entityId FieldDef" $ do + describe "User" $ do + it "has default ID key, SqlInt64" $ do + sqlType (Proxy @UserId) + `shouldBe` + SqlInt64 + + testSqlTypeEquivalent (Proxy @User) + + describe "Profile" $ do + it "has same ID key type as User" $ do + sqlType (Proxy @ProfileId) + `shouldBe` + sqlType (Proxy @UserId) + testSqlTypeEquivalent(Proxy @Profile) + + describe "Profile2" $ do + it "has same ID key type as User" $ do + sqlType (Proxy @Profile2Id) + `shouldBe` + sqlType (Proxy @UserId) + testSqlTypeEquivalent (Proxy @Profile2) + + describe "getEntityId FieldDef" $ do it "should match underlying primary key" $ do - let getSqlType :: PersistEntity a => Proxy a -> SqlType - getSqlType = - fieldSqlType . entityId . entityDef getSqlType (Proxy @User) `shouldBe` getSqlType (Proxy @Profile) + + describe "DayKeyTable" $ do + testSqlTypeEquivalent (Proxy @DayKeyTable) + + it "sqlType has Day type" $ do + sqlType (Proxy @Day) + `shouldBe` + sqlType (Proxy @DayKeyTableId) + + it "getSqlType has Day type" $ do + sqlType (Proxy @Day) + `shouldBe` + getSqlType (Proxy @DayKeyTable) + + describe "RefDayKey" $ do + let + [dayKeyField] = + getEntityFields (entityDef (Proxy @RefDayKey)) + testSqlTypeEquivalent (Proxy @RefDayKey) + + it "has same sqltype as underlying" $ do + fieldSqlType dayKeyField + `shouldBe` + sqlType (Proxy @Day) + + it "has the right fieldType" $ do + fieldType dayKeyField + `shouldBe` + FTTypeCon Nothing "DayKeyTableId" + + it "has the right type" $ do + let + _ = + refDayKeyDayKey + :: RefDayKey -> DayKeyTableId + _ = + RefDayKeyDayKey + :: EntityField RefDayKey DayKeyTableId + True `shouldBe` True + + it "has a foreign ref" $ do + case fieldReference dayKeyField of + ForeignRef refName -> do + refName `shouldBe` EntityNameHS "DayKeyTable" + other -> + fail $ "expected foreign ref, got: " <> show other diff --git a/persistent/test/Database/Persist/TH/ToFromPersistValuesSpec.hs 
b/persistent/test/Database/Persist/TH/ToFromPersistValuesSpec.hs new file mode 100644 index 000000000..c632d47ea --- /dev/null +++ b/persistent/test/Database/Persist/TH/ToFromPersistValuesSpec.hs @@ -0,0 +1,274 @@ +{-# LANGUAGE DataKinds, ScopedTypeVariables #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE ExistentialQuantification #-} +{-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} +{-# LANGUAGE MultiParamTypeClasses #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} +{-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE TypeFamilies #-} +{-# LANGUAGE UndecidableInstances #-} +-- +-- DeriveAnyClass is not actually used by persistent-template +-- But a long standing bug was that if it was enabled, it was used to derive instead of GeneralizedNewtypeDeriving +-- This was fixed by using DerivingStrategies to specify newtype deriving should be used. +-- This pragma is left here as a "test" that deriving works when DeriveAnyClass is enabled. +-- See https://github.com/yesodweb/persistent/issues/578 +{-# LANGUAGE DeriveAnyClass #-} + +module Database.Persist.TH.ToFromPersistValuesSpec where + +import TemplateTestImports + +import Database.Persist.Sql.Util +import Database.Persist.Class.PersistEntity +import Data.List.NonEmpty (NonEmpty(..)) +import qualified Data.List.NonEmpty as NEL + +instance PersistFieldSql a => PersistFieldSql (NonEmpty a) where + sqlType _ = SqlString + +instance PersistField a => PersistField (NonEmpty a) where + toPersistValue = toPersistValue . 
NEL.toList + fromPersistValue pv = do + xs <- fromPersistValue pv + case xs of + [] -> Left "PersistField: NonEmpty found unexpected Empty List" + (l:ls) -> Right (l:|ls) + +mkPersist sqlSettings [persistLowerCase| + +NormalModel + name Text + age Int + deriving Eq Show + +PrimaryModel + name Text + age Int + Primary name age + deriving Eq Show + +IsMigrationOnly + name Text + age Int + blargh Int MigrationOnly + deriving Eq Show + +HasListField + names [Text] + deriving Eq Show + +HasNonEmptyListField + names (NonEmpty Text) + deriving Eq Show + +HasNonEmptyListKeyField + names (NonEmpty (Key NormalModel)) + deriving Eq Show +|] + +spec :: Spec +spec = describe "{to,from}PersistValues" $ do + let + toPersistValues + :: PersistEntity rec => rec -> [PersistValue] + toPersistValues = + map toPersistValue . toPersistFields + + subject + :: (PersistEntity rec, Show rec, Eq rec) + => rec + -> [PersistValue] + -> Spec + subject model fields = do + it "toPersistValues" $ do + toPersistValues model + `shouldBe` + fields + it "fromPersistValues" $ do + fromPersistValues fields + `shouldBe` + Right model + describe "NormalModel" $ do + subject + (NormalModel "hello" 30) + [ PersistText "hello" + , PersistInt64 30 + ] + + describe "PrimaryModel" $ do + subject + (PrimaryModel "hello" 30) + [ PersistText "hello" + , PersistInt64 30 + ] + + describe "IsMigrationOnly" $ do + subject + (IsMigrationOnly "hello" 30) + [ PersistText "hello" + , PersistInt64 30 + ] + + describe "mkInsertValues" $ do + describe "NormalModel" $ do + it "has all values" $ do + mkInsertValues (NormalModel "hello" 30) + `shouldBe` + [ PersistText "hello" + , PersistInt64 30 + ] + describe "PrimaryModel" $ do + it "has all values" $ do + mkInsertValues (PrimaryModel "hello" 30) + `shouldBe` + [ PersistText "hello" + , PersistInt64 30 + ] + describe "IsMigrationOnly" $ do + it "has all values" $ do + mkInsertValues (IsMigrationOnly "hello" 30) + `shouldBe` + [ PersistText "hello" + , PersistInt64 30 + ] + 
describe "parseEntityValues" $ do + let + subject + :: forall rec. (PersistEntity rec, Show rec, Eq rec) + => [PersistValue] + -> Entity rec + -> Spec + subject pvs rec = + it "parses" $ do + parseEntityValues (entityDef (Proxy @rec)) pvs + `shouldBe` + Right rec + describe "NormalModel" $ do + subject + [ PersistInt64 20 + , PersistText "hello" + , PersistInt64 30 + ] + Entity + { entityKey = + NormalModelKey 20 + , entityVal = + NormalModel "hello" 30 + } + describe "PrimaryModel" $ do + subject + [ PersistText "hey" + , PersistInt64 30 + ] + Entity + { entityKey = + PrimaryModelKey "hey" 30 + , entityVal = + PrimaryModel "hey" 30 + } + describe "IsMigrationOnly" $ do + subject + [ PersistInt64 20 + , PersistText "hello" + , PersistInt64 30 + ] + Entity + { entityKey = + IsMigrationOnlyKey 20 + , entityVal = + IsMigrationOnly "hello" 30 + } + describe "entityValues" $ do + let + subject + :: forall rec. (PersistEntity rec, Show rec, Eq rec) + => [PersistValue] + -> Entity rec + -> Spec + subject pvals entity = do + it "renders as you would expect"$ do + entityValues entity + `shouldBe` + pvals + it "round trips with parseEntityValues" $ do + parseEntityValues + (entityDef $ Proxy @rec) + (entityValues entity) + `shouldBe` + Right entity + describe "NormalModel" $ do + subject + [ PersistInt64 10 + , PersistText "hello" + , PersistInt64 20 + ] + Entity + { entityKey = + NormalModelKey 10 + , entityVal = + NormalModel "hello" 20 + } + describe "PrimaryModel" $ do + subject + [ PersistText "hello" + , PersistInt64 20 + ] + Entity + { entityKey = + PrimaryModelKey "hello" 20 + , entityVal = + PrimaryModel "hello" 20 + } + describe "IsMigrationOnly" $ do + subject + [ PersistInt64 20 + , PersistText "hello" + , PersistInt64 20 + ] + Entity + { entityKey = + IsMigrationOnlyKey 20 + , entityVal = + IsMigrationOnly "hello" 20 + } + + describe "HasListField" $ do + subject + [ PersistInt64 10 + , PersistList [PersistText "hello"] + ] + Entity + { entityKey = + 
HasListFieldKey 10 + , entityVal = + HasListField ["hello"] + } + describe "HasNonEmptyListField" $ do + subject + [ PersistInt64 10 + , PersistList [PersistText "hello"] + ] + Entity + { entityKey = + HasNonEmptyListFieldKey 10 + , entityVal = + HasNonEmptyListField (pure "hello") + } + describe "HasNonEmptyListKeyField" $ do + subject + [ PersistInt64 5 + , PersistList [PersistInt64 4] + ] + Entity + { entityKey = + HasNonEmptyListKeyFieldKey 5 + , entityVal = + HasNonEmptyListKeyField (pure (NormalModelKey 4)) + } diff --git a/persistent/test/Database/Persist/THSpec.hs b/persistent/test/Database/Persist/THSpec.hs index a06fb36bd..422bc0dd9 100644 --- a/persistent/test/Database/Persist/THSpec.hs +++ b/persistent/test/Database/Persist/THSpec.hs @@ -1,16 +1,18 @@ -{-# LANGUAGE TypeApplications, DeriveGeneric, RecordWildCards #-} +{-# LANGUAGE DataKinds #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingStrategies #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE QuasiQuotes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} +{-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE UndecidableInstances #-} -{-# LANGUAGE DerivingStrategies #-} -{-# LANGUAGE StandaloneDeriving #-} -{-# language DataKinds #-} -- -- DeriveAnyClass is not actually used by persistent-template -- But a long standing bug was that if it was enabled, it was used to derive instead of GeneralizedNewtypeDeriving @@ -21,33 +23,50 @@ module Database.Persist.THSpec where -import Data.Int -import Data.Proxy -import Control.Applicative (Const (..)) +import Control.Applicative (Const(..)) import Data.Aeson import Data.ByteString.Lazy.Char8 () -import Data.Functor.Identity (Identity (..)) +import Data.Coerce +import Data.Functor.Identity (Identity(..)) +import 
Data.Int +import qualified Data.List as List +import Data.Proxy import Data.Text (Text, pack) +import GHC.Generics (Generic) import Test.Hspec import Test.Hspec.QuickCheck import Test.QuickCheck.Arbitrary import Test.QuickCheck.Gen (Gen) -import GHC.Generics (Generic) -import qualified Data.List as List -import Data.Coerce import Database.Persist +import Database.Persist.EntityDef.Internal import Database.Persist.Sql import Database.Persist.Sql.Util import Database.Persist.TH import TemplateTestImports +import qualified Database.Persist.TH.DiscoverEntitiesSpec as DiscoverEntitiesSpec +import qualified Database.Persist.TH.EmbedSpec as EmbedSpec +import qualified Database.Persist.TH.ForeignRefSpec as ForeignRefSpec +import qualified Database.Persist.TH.ImplicitIdColSpec as ImplicitIdColSpec +import qualified Database.Persist.TH.JsonEncodingSpec as JsonEncodingSpec +import qualified Database.Persist.TH.KindEntitiesSpec as KindEntitiesSpec +import qualified Database.Persist.TH.MigrationOnlySpec as MigrationOnlySpec +import qualified Database.Persist.TH.MultiBlockSpec as MultiBlockSpec +import qualified Database.Persist.TH.OverloadedLabelSpec as OverloadedLabelSpec +import qualified Database.Persist.TH.SharedPrimaryKeyImportedSpec as SharedPrimaryKeyImportedSpec +import qualified Database.Persist.TH.SharedPrimaryKeySpec as SharedPrimaryKeySpec +import qualified Database.Persist.TH.ToFromPersistValuesSpec as ToFromPersistValuesSpec import qualified Database.Persist.TH.KindEntitiesSpec as KindEntitiesSpec import qualified Database.Persist.TH.OverloadedLabelSpec as OverloadedLabelSpec import qualified Database.Persist.TH.SharedPrimaryKeyImportedSpec as SharedPrimaryKeyImportedSpec import qualified Database.Persist.TH.SharedPrimaryKeySpec as SharedPrimaryKeySpec -share [mkPersist sqlSettings { mpsGeneric = False, mpsDeriveInstances = [''Generic] }, mkDeleteCascade sqlSettings { mpsGeneric = False }] [persistUpperCase| +-- test to ensure we can have types ending in Id that 
don't trash the TH +-- machinery +type TextId = Text + +share [mkPersist sqlSettings { mpsGeneric = False, mpsDeriveInstances = [''Generic] }] [persistUpperCase| Person json name Text @@ -81,6 +100,10 @@ HasMultipleColPrimaryDef barbaz String Primary foobar barbaz +TestDefaultKeyCol + Id TestDefaultKeyColId + name String + HasIdDef Id Int name String @@ -93,7 +116,7 @@ HasCustomSqlId name String SharedPrimaryKey - Id (Key HasDefaultId) + Id HasDefaultIdId name String SharedPrimaryKeyWithCascade @@ -103,6 +126,22 @@ SharedPrimaryKeyWithCascade SharedPrimaryKeyWithCascadeAndCustomName Id (Key HasDefaultId) OnDeleteCascade sql=my_id name String + +Top + name Text + +Middle + top TopId + Primary top + +Bottom + middle MiddleId + Primary middle + +-- Test that a field can be named Key +KeyTable + key Text + |] share [mkPersist sqlSettings { mpsGeneric = False, mpsGenerateLenses = True }] [persistLowerCase| @@ -131,13 +170,34 @@ instance Arbitrary Address where arbitrary = Address <$> arbitraryT <*> arbitraryT <*> arbitrary spec :: Spec -spec = do +spec = describe "THSpec" $ do KindEntitiesSpec.spec OverloadedLabelSpec.spec SharedPrimaryKeySpec.spec SharedPrimaryKeyImportedSpec.spec + ImplicitIdColSpec.spec + MigrationOnlySpec.spec + EmbedSpec.spec + DiscoverEntitiesSpec.spec + MultiBlockSpec.spec + ForeignRefSpec.spec + ToFromPersistValuesSpec.spec + JsonEncodingSpec.spec + describe "TestDefaultKeyCol" $ do + let EntityIdField FieldDef{..} = + entityId (entityDef (Proxy @TestDefaultKeyCol)) + it "should be a BackendKey SqlBackend" $ do + -- the purpose of this test is to verify that a custom Id column of + -- the form: + -- > ModelName + -- > Id ModelNameId + -- + -- should behave like an implicit id column. 
+ (TestDefaultKeyColKey (SqlBackendKey 32) :: Key TestDefaultKeyCol) + `shouldBe` + (toSqlKey 32 :: Key TestDefaultKeyCol) describe "HasDefaultId" $ do - let FieldDef{..} = + let EntityIdField FieldDef{..} = entityId (entityDef (Proxy @HasDefaultId)) it "should have usual db name" $ do fieldDB `shouldBe` FieldNameDB "id" @@ -151,23 +211,23 @@ spec = do fieldType `shouldBe` FTTypeCon Nothing "HasDefaultIdId" describe "HasCustomSqlId" $ do - let FieldDef{..} = + let EntityIdField FieldDef{..} = entityId (entityDef (Proxy @HasCustomSqlId)) it "should have custom db name" $ do fieldDB `shouldBe` FieldNameDB "my_id" it "should have usual haskell name" $ do - fieldHaskell `shouldBe` FieldNameHS "id" + fieldHaskell `shouldBe` FieldNameHS "Id" it "should have correct underlying sql type" $ do fieldSqlType `shouldBe` SqlString it "should have correct haskell type" $ do fieldType `shouldBe` FTTypeCon Nothing "String" describe "HasIdDef" $ do - let FieldDef{..} = + let EntityIdField FieldDef{..} = entityId (entityDef (Proxy @HasIdDef)) it "should have usual db name" $ do fieldDB `shouldBe` FieldNameDB "id" it "should have usual haskell name" $ do - fieldHaskell `shouldBe` FieldNameHS "id" + fieldHaskell `shouldBe` FieldNameHS "Id" it "should have correct underlying sql type" $ do fieldSqlType `shouldBe` SqlInt64 it "should have correct haskell type" $ do @@ -175,16 +235,18 @@ spec = do describe "SharedPrimaryKey" $ do let sharedDef = entityDef (Proxy @SharedPrimaryKey) - FieldDef{..} = + EntityIdField FieldDef{..} = entityId sharedDef it "should have usual db name" $ do fieldDB `shouldBe` FieldNameDB "id" it "should have usual haskell name" $ do - fieldHaskell `shouldBe` FieldNameHS "id" + fieldHaskell `shouldBe` FieldNameHS "Id" it "should have correct underlying sql type" $ do fieldSqlType `shouldBe` SqlInt64 + it "should have correct underlying (as reported by sqltype)" $ do + fieldSqlType `shouldBe` sqlType (Proxy :: Proxy HasDefaultIdId) it "should have correct haskell 
type" $ do - fieldType `shouldBe` FTApp (FTTypeCon Nothing "Key") (FTTypeCon Nothing "HasDefaultId") + fieldType `shouldBe` (FTTypeCon Nothing "HasDefaultIdId") it "should have correct sql type from PersistFieldSql" $ do sqlType (Proxy @SharedPrimaryKeyId) `shouldBe` @@ -198,18 +260,13 @@ spec = do `shouldBe` SharedPrimaryKeyKey (toSqlKey 3) - it "is a newtype" $ do - pkNewtype sqlSettings sharedDef - `shouldBe` - True - describe "SharedPrimaryKeyWithCascade" $ do - let FieldDef{..} = + let EntityIdField FieldDef{..} = entityId (entityDef (Proxy @SharedPrimaryKeyWithCascade)) it "should have usual db name" $ do fieldDB `shouldBe` FieldNameDB "id" it "should have usual haskell name" $ do - fieldHaskell `shouldBe` FieldNameHS "id" + fieldHaskell `shouldBe` FieldNameHS "Id" it "should have correct underlying sql type" $ do fieldSqlType `shouldBe` SqlInt64 it "should have correct haskell type" $ do @@ -240,18 +297,19 @@ spec = do { entityHaskell = EntityNameHS "HasSimpleCascadeRef" , entityDB = EntityNameDB "HasSimpleCascadeRef" , entityId = - FieldDef + EntityIdField FieldDef { fieldHaskell = FieldNameHS "Id" , fieldDB = FieldNameDB "id" , fieldType = FTTypeCon Nothing "HasSimpleCascadeRefId" , fieldSqlType = SqlInt64 , fieldReference = - ForeignRef (EntityNameHS "HasSimpleCascadeRef") (FTTypeCon (Just "Data.Int") "Int64") + NoReference , fieldAttrs = [] , fieldStrict = True , fieldComments = Nothing , fieldCascade = noCascade , fieldGenerated = Nothing + , fieldIsImplicitIdColumn = True } , entityAttrs = [] , entityFields = @@ -265,11 +323,11 @@ spec = do , fieldReference = ForeignRef (EntityNameHS "Person") - (FTTypeCon (Just "Data.Int") "Int64") , fieldCascade = FieldCascade { fcOnUpdate = Nothing, fcOnDelete = Just Cascade } , fieldComments = Nothing , fieldGenerated = Nothing + , fieldIsImplicitIdColumn = False } ] , entityUniques = [] diff --git a/persistent/test/TemplateTestImports.hs b/persistent/test/TemplateTestImports.hs index 6be306b72..5f4886f7e 100644 
--- a/persistent/test/TemplateTestImports.hs +++ b/persistent/test/TemplateTestImports.hs @@ -1,3 +1,5 @@ +{-# LANGUAGE DerivingStrategies #-} +{-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE TemplateHaskell #-} module TemplateTestImports @@ -8,9 +10,15 @@ module TemplateTestImports import Data.Aeson.TH import Test.QuickCheck -import Test.Hspec as X +import Data.Int as X import Database.Persist.Sql as X import Database.Persist.TH as X +import Test.Hspec as X +import Data.Proxy as X +import Data.Text as X (Text) +import Data.Maybe +import Control.Monad +import Language.Haskell.TH.Syntax data Foo = Bar | Baz deriving (Show, Eq) diff --git a/persistent/test/main.hs b/persistent/test/main.hs index aa842cf47..b898bc84d 100644 --- a/persistent/test/main.hs +++ b/persistent/test/main.hs @@ -1,913 +1,16 @@ -{-# LANGUAGE CPP #-} -{-# LANGUAGE OverloadedLists #-} -{-# LANGUAGE QuasiQuotes #-} -{-# LANGUAGE RecordWildCards #-} +module Main where -import qualified Data.Char as Char -import Data.List -import Data.List.NonEmpty (NonEmpty(..), (<|)) -import qualified Data.List.NonEmpty as NEL -import qualified Data.Map as Map -import qualified Data.Text as T import Test.Hspec -import Test.Hspec.QuickCheck -import Test.QuickCheck -#if !MIN_VERSION_base(4,11,0) --- This can be removed when GHC < 8.2.2 isn't supported anymore -import Data.Semigroup ((<>)) -#endif -import Data.Aeson -import qualified Data.ByteString.Char8 as BS8 -import Data.Time -import Text.Shakespeare.Text - -import Database.Persist.Class.PersistField -import Database.Persist.Quasi -import Database.Persist.Types +import qualified Database.Persist.ClassSpec as ClassSpec +import qualified Database.Persist.PersistValueSpec as PersistValueSpec +import qualified Database.Persist.QuasiSpec as QuasiSpec import qualified Database.Persist.THSpec as THSpec main :: IO () main = hspec $ do - describe "Database.Persist" $ do - describe "THSpec" THSpec.spec - - THSpec.spec - describe "splitExtras" $ do - let 
helloWorldTokens = Token "hello" :| [Token "world"] - foobarbazTokens = Token "foo" :| [Token "bar", Token "baz"] - it "works" $ do - splitExtras [] - `shouldBe` - mempty - it "works2" $ do - splitExtras - [ Line 0 helloWorldTokens - ] - `shouldBe` - ( [NEL.toList helloWorldTokens], mempty ) - it "works3" $ do - splitExtras - [ Line 0 helloWorldTokens - , Line 2 foobarbazTokens - ] - `shouldBe` - ( [NEL.toList helloWorldTokens, NEL.toList foobarbazTokens], mempty ) - it "works4" $ do - splitExtras - [ Line 0 [Token "Product"] - , Line 2 (Token <$> ["name", "Text"]) - , Line 2 (Token <$> ["added", "UTCTime", "default=CURRENT_TIMESTAMP"]) - ] - `shouldBe` - ( [] - , Map.fromList - [ ("Product", - [ ["name", "Text"] - , ["added", "UTCTime", "default=CURRENT_TIMESTAMP"] - ] - ) ] - ) - it "works5" $ do - splitExtras - [ Line 0 [Token "Product"] - , Line 2 (Token <$> ["name", "Text"]) - , Line 4 [Token "ExtraBlock"] - , Line 4 (Token <$> ["added", "UTCTime", "default=CURRENT_TIMESTAMP"]) - ] - `shouldBe` - ( [] - , Map.fromList - [ ("Product", - [ ["name", "Text"] - , ["ExtraBlock"] - , ["added", "UTCTime", "default=CURRENT_TIMESTAMP"] - ] - )] - ) - describe "takeColsEx" $ do - let subject = takeColsEx upperCaseSettings - it "fails on a single word" $ do - subject ["asdf"] - `shouldBe` - Nothing - it "works if it has a name and a type" $ do - subject ["asdf", "Int"] - `shouldBe` - Just FieldDef - { fieldHaskell = FieldNameHS "asdf" - , fieldDB = FieldNameDB "asdf" - , fieldType = FTTypeCon Nothing "Int" - , fieldSqlType = SqlOther "SqlType unset for asdf" - , fieldAttrs = [] - , fieldStrict = True - , fieldReference = NoReference - , fieldCascade = noCascade - , fieldComments = Nothing - , fieldGenerated = Nothing - } - it "works if it has a name, type, and cascade" $ do - subject ["asdf", "Int", "OnDeleteCascade", "OnUpdateCascade"] - `shouldBe` - Just FieldDef - { fieldHaskell = FieldNameHS "asdf" - , fieldDB = FieldNameDB "asdf" - , fieldType = FTTypeCon Nothing 
"Int" - , fieldSqlType = SqlOther "SqlType unset for asdf" - , fieldAttrs = [] - , fieldStrict = True - , fieldReference = NoReference - , fieldCascade = FieldCascade (Just Cascade) (Just Cascade) - , fieldComments = Nothing - , fieldGenerated = Nothing - } - it "never tries to make a refernece" $ do - subject ["asdf", "UserId", "OnDeleteCascade"] - `shouldBe` - Just FieldDef - { fieldHaskell = FieldNameHS "asdf" - , fieldDB = FieldNameDB "asdf" - , fieldType = FTTypeCon Nothing "UserId" - , fieldSqlType = SqlOther "SqlType unset for asdf" - , fieldAttrs = [] - , fieldStrict = True - , fieldReference = NoReference - , fieldCascade = FieldCascade Nothing (Just Cascade) - , fieldComments = Nothing - , fieldGenerated = Nothing - } - - describe "parseLine" $ do - it "returns nothing when line is just whitespace" $ - parseLine " " `shouldBe` Nothing - - it "handles normal words" $ - parseLine " foo bar baz" `shouldBe` - Just - ( Line 1 - [ Token "foo" - , Token "bar" - , Token "baz" - ] - ) - - it "handles quotes" $ - parseLine " \"foo bar\" \"baz\"" `shouldBe` - Just - ( Line 2 - [ Token "foo bar" - , Token "baz" - ] - ) - - it "handles quotes mid-token" $ - parseLine " x=\"foo bar\" \"baz\"" `shouldBe` - Just - ( Line 2 - [ Token "x=foo bar" - , Token "baz" - ] - ) - - it "handles escaped quote mid-token" $ - parseLine " x=\\\"foo bar\" \"baz\"" `shouldBe` - Just - ( Line 2 - [ Token "x=\\\"foo" - , Token "bar\"" - , Token "baz" - ] - ) - - it "handles unnested parantheses" $ - parseLine " (foo bar) (baz)" `shouldBe` - Just - ( Line 2 - [ Token "foo bar" - , Token "baz" - ] - ) - - it "handles unnested parantheses mid-token" $ - parseLine " x=(foo bar) (baz)" `shouldBe` - Just - ( Line 2 - [ Token "x=foo bar" - , Token "baz" - ] - ) - - it "handles nested parantheses" $ - parseLine " (foo (bar)) (baz)" `shouldBe` - Just - ( Line 2 - [ Token "foo (bar)" - , Token "baz" - ] - ) - - it "escaping" $ - parseLine " (foo \\(bar) y=\"baz\\\"\"" `shouldBe` - Just - ( Line 2 - 
[ Token "foo (bar" - , Token "y=baz\"" - ] - ) - - it "mid-token quote in later token" $ - parseLine "foo bar baz=(bin\")" `shouldBe` - Just - ( Line 0 - [ Token "foo" - , Token "bar" - , Token "baz=bin\"" - ] - ) - - describe "comments" $ do - it "recognizes one line" $ do - parseLine "-- | this is a comment" `shouldBe` - Just - ( Line 0 - [ DocComment "this is a comment" - ] - ) - - it "works if comment is indented" $ do - parseLine " -- | comment" `shouldBe` - Just (Line 2 [DocComment "comment"]) - - describe "parse" $ do - let subject = - [st| -Bicycle -- | this is a bike - brand String -- | the brand of the bike - ExtraBike - foo bar -- | this is a foo bar - baz - deriving Eq --- | This is a Car -Car - -- | the make of the Car - make String - -- | the model of the Car - model String - UniqueModel model - deriving Eq Show -+Vehicle - bicycle BicycleId -- | the bike reference - car CarId -- | the car reference - - |] - let [bicycle, car, vehicle] = parse lowerCaseSettings subject - - it "should parse the `entityHaskell` field" $ do - entityHaskell bicycle `shouldBe` EntityNameHS "Bicycle" - entityHaskell car `shouldBe` EntityNameHS "Car" - entityHaskell vehicle `shouldBe` EntityNameHS "Vehicle" - - it "should parse the `entityDB` field" $ do - entityDB bicycle `shouldBe` EntityNameDB "bicycle" - entityDB car `shouldBe` EntityNameDB "car" - entityDB vehicle `shouldBe` EntityNameDB "vehicle" - - it "should parse the `entityId` field" $ do - fieldHaskell (entityId bicycle) `shouldBe` FieldNameHS "Id" - fieldComments (entityId bicycle) `shouldBe` Nothing - fieldHaskell (entityId car) `shouldBe` FieldNameHS "Id" - fieldComments (entityId car) `shouldBe` Nothing - fieldHaskell (entityId vehicle) `shouldBe` FieldNameHS "Id" - fieldComments (entityId vehicle) `shouldBe` Nothing - - it "should parse the `entityAttrs` field" $ do - entityAttrs bicycle `shouldBe` ["-- | this is a bike"] - entityAttrs car `shouldBe` [] - entityAttrs vehicle `shouldBe` [] - - it "should 
parse the `entityFields` field" $ do - let simplifyField field = - (fieldHaskell field, fieldDB field, fieldComments field) - (simplifyField <$> entityFields bicycle) `shouldBe` - [ (FieldNameHS "brand", FieldNameDB "brand", Nothing) - ] - (simplifyField <$> entityFields car) `shouldBe` - [ (FieldNameHS "make", FieldNameDB "make", Just "the make of the Car\n") - , (FieldNameHS "model", FieldNameDB "model", Just "the model of the Car\n") - ] - (simplifyField <$> entityFields vehicle) `shouldBe` - [ (FieldNameHS "bicycle", FieldNameDB "bicycle", Nothing) - , (FieldNameHS "car", FieldNameDB "car", Nothing) - ] - - it "should parse the `entityUniques` field" $ do - let simplifyUnique unique = - (uniqueHaskell unique, uniqueFields unique) - (simplifyUnique <$> entityUniques bicycle) `shouldBe` [] - (simplifyUnique <$> entityUniques car) `shouldBe` - [ (ConstraintNameHS "UniqueModel", [(FieldNameHS "model", FieldNameDB "model")]) - ] - (simplifyUnique <$> entityUniques vehicle) `shouldBe` [] - - it "should parse the `entityForeigns` field" $ do - let [user, notification] = parse lowerCaseSettings [st| -User - name Text - emailFirst Text - emailSecond Text - - UniqueEmail emailFirst emailSecond - -Notification - content Text - sentToFirst Text - sentToSecond Text - - Foreign User fk_noti_user sentToFirst sentToSecond References emailFirst emailSecond -|] - entityForeigns user `shouldBe` [] - entityForeigns notification `shouldBe` - [ ForeignDef - { foreignRefTableHaskell = EntityNameHS "User" - , foreignRefTableDBName = EntityNameDB "user" - , foreignConstraintNameHaskell = ConstraintNameHS "fk_noti_user" - , foreignConstraintNameDBName = ConstraintNameDB "notificationfk_noti_user" - , foreignFieldCascade = FieldCascade Nothing Nothing - , foreignFields = - [ ((FieldNameHS "sentToFirst", FieldNameDB "sent_to_first"), (FieldNameHS "emailFirst", FieldNameDB "email_first")) - , ((FieldNameHS "sentToSecond", FieldNameDB "sent_to_second"), (FieldNameHS "emailSecond", 
FieldNameDB "email_second")) - ] - , foreignAttrs = [] - , foreignNullable = False - , foreignToPrimary = False - } - ] - - it "should parse the `entityDerives` field" $ do - entityDerives bicycle `shouldBe` ["Eq"] - entityDerives car `shouldBe` ["Eq", "Show"] - entityDerives vehicle `shouldBe` [] - - it "should parse the `entityEntities` field" $ do - entityExtra bicycle `shouldBe` Map.singleton "ExtraBike" [["foo", "bar", "-- | this is a foo bar"], ["baz"]] - entityExtra car `shouldBe` mempty - entityExtra vehicle `shouldBe` mempty - - it "should parse the `entitySum` field" $ do - entitySum bicycle `shouldBe` False - entitySum car `shouldBe` False - entitySum vehicle `shouldBe` True - - it "should parse the `entityComments` field" $ do - entityComments bicycle `shouldBe` Nothing - entityComments car `shouldBe` Just "This is a Car\n" - entityComments vehicle `shouldBe` Nothing - - describe "ticked types" $ do - it "should be able to parse ticked types" $ do - let simplifyField field = - (fieldHaskell field, fieldType field) - let tickedDefinition = [st| -CustomerTransfer - customerId CustomerId - moneyAmount (MoneyAmount 'Customer 'Debit) - currencyCode CurrencyCode - uuid TransferUuid -|] - let [customerTransfer] = parse lowerCaseSettings tickedDefinition - let expectedType = - FTTypeCon Nothing "MoneyAmount" `FTApp` FTTypePromoted "Customer" `FTApp` FTTypePromoted "Debit" - - (simplifyField <$> entityFields customerTransfer) `shouldBe` - [ (FieldNameHS "customerId", FTTypeCon Nothing "CustomerId") - , (FieldNameHS "moneyAmount", expectedType) - , (FieldNameHS "currencyCode", FTTypeCon Nothing "CurrencyCode") - , (FieldNameHS "uuid", FTTypeCon Nothing "TransferUuid") - ] - - describe "parseFieldType" $ do - it "simple types" $ - parseFieldType "FooBar" `shouldBe` Right (FTTypeCon Nothing "FooBar") - it "module types" $ - parseFieldType "Data.Map.FooBar" `shouldBe` Right (FTTypeCon (Just "Data.Map") "FooBar") - it "application" $ - parseFieldType "Foo Bar" 
`shouldBe` Right ( - FTTypeCon Nothing "Foo" `FTApp` FTTypeCon Nothing "Bar") - it "application multiple" $ - parseFieldType "Foo Bar Baz" `shouldBe` Right ( - (FTTypeCon Nothing "Foo" `FTApp` FTTypeCon Nothing "Bar") - `FTApp` FTTypeCon Nothing "Baz" - ) - it "parens" $ do - let foo = FTTypeCon Nothing "Foo" - bar = FTTypeCon Nothing "Bar" - baz = FTTypeCon Nothing "Baz" - parseFieldType "Foo (Bar Baz)" `shouldBe` Right ( - foo `FTApp` (bar `FTApp` baz)) - it "lists" $ do - let foo = FTTypeCon Nothing "Foo" - bar = FTTypeCon Nothing "Bar" - bars = FTList bar - baz = FTTypeCon Nothing "Baz" - parseFieldType "Foo [Bar] Baz" `shouldBe` Right ( - foo `FTApp` bars `FTApp` baz) - - describe "#1175 empty entity" $ do - let subject = - [st| -Foo - name String - age Int - -EmptyEntity - -Bar - name String - -Baz - a Int - b String - c FooId - |] - - let preparsed = - preparse subject - it "preparse works" $ do - (length <$> preparsed) `shouldBe` Just 10 - - let fooLines = - [ Line - { lineIndent = 0 - , tokens = Token "Foo" :| [] - } - , Line - { lineIndent = 4 - , tokens = Token "name" :| [Token "String"] - } - , Line - { lineIndent = 4 - , tokens = Token "age" :| [Token "Int"] - } - ] - emptyLines = - [ Line - { lineIndent = 0 - , tokens = Token "EmptyEntity" :| [] - } - ] - barLines = - [ Line - { lineIndent = 0 - , tokens = Token "Bar" :| [] - } - , Line - { lineIndent = 4 - , tokens = Token "name" :| [Token "String"] - } - ] - bazLines = - [ Line - { lineIndent = 0 - , tokens = Token "Baz" :| [] - } - , Line - { lineIndent = 4 - , tokens = Token "a" :| [Token "Int"] - } - , Line - { lineIndent = 4 - , tokens = Token "b" :| [Token "String"] - } - , Line - { lineIndent = 4 - , tokens = Token "c" :| [Token "FooId"] - } - ] - - let linesAssociated = - case preparsed of - Nothing -> error "preparsed failed" - Just lines -> associateLines lines - it "associateLines works" $ do - linesAssociated `shouldMatchList` - [ LinesWithComments - { lwcLines = NEL.fromList fooLines - , 
lwcComments = [] - } - , LinesWithComments (NEL.fromList emptyLines) [] - , LinesWithComments (NEL.fromList barLines) [] - , LinesWithComments (NEL.fromList bazLines) [] - ] - - let parsed = - parse lowerCaseSettings subject - it "parse works" $ do - let test name'fieldCount xs = do - case (name'fieldCount, xs) of - ([], []) -> - pure () - ((name, fieldCount) : _, []) -> - expectationFailure - $ "Expected an entity with name " - <> name - <> " and " <> show fieldCount <> " fields" - <> ", but the list was empty..." - - ((name, fieldCount) : ys, (EntityDef {..} : xs)) -> do - (unEntityNameHS entityHaskell, length entityFields) - `shouldBe` - (T.pack name, fieldCount) - test ys xs - - result = - parse lowerCaseSettings subject - length parsed `shouldBe` 4 - - test - [ ("Foo", 2) - , ("EmptyEntity", 0) - , ("Bar", 1) - , ("Baz", 3) - ] - parsed - - - describe "preparse" $ do - prop "omits lines that are only whitespace" $ \len -> do - ws <- vectorOf len arbitraryWhiteSpaceChar - pure $ preparse (T.pack ws) === Nothing - - it "recognizes entity" $ do - let expected = - Line { lineIndent = 0, tokens = pure (Token "Person") } :| - [ Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } - , Line { lineIndent = 2, tokens = Token "age" :| [Token "Int"] } - ] - preparse "Person\n name String\n age Int" `shouldBe` Just expected - - it "recognizes comments" $ do - let text = "Foo\n x X\n-- | Hello\nBar\n name String" - let expected = - Line { lineIndent = 0, tokens = pure (Token "Foo") } :| - [ Line { lineIndent = 2, tokens = Token "x" :| [Token "X"] } - , Line { lineIndent = 0, tokens = pure (DocComment "Hello") } - , Line { lineIndent = 0, tokens = pure (Token "Bar") } - , Line { lineIndent = 1, tokens = Token "name" :| [Token "String"] } - ] - preparse text `shouldBe` Just expected - - it "preparse indented" $ do - let t = T.unlines - [ " Foo" - , " x X" - , " -- | Comment" - , " -- hidden comment" - , " Bar" - , " name String" - ] - expected = - Line { 
lineIndent = 2, tokens = pure (Token "Foo") } :| - [ Line { lineIndent = 4, tokens = Token "x" :| [Token "X"] } - , Line { lineIndent = 2, tokens = pure (DocComment "Comment") } - , Line { lineIndent = 2, tokens = pure (Token "Bar") } - , Line { lineIndent = 4, tokens = Token "name" :| [Token "String"] } - ] - preparse t `shouldBe` Just expected - - it "preparse extra blocks" $ do - let t = T.unlines - [ "LowerCaseTable" - , " name String" - , " ExtraBlock" - , " foo bar" - , " baz" - , " ExtraBlock2" - , " something" - ] - expected = - Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| - [ Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } - , Line { lineIndent = 2, tokens = pure (Token "ExtraBlock") } - , Line { lineIndent = 4, tokens = Token "foo" :| [Token "bar"] } - , Line { lineIndent = 4, tokens = pure (Token "baz") } - , Line { lineIndent = 2, tokens = pure (Token "ExtraBlock2") } - , Line { lineIndent = 4, tokens = pure (Token "something") } - ] - preparse t `shouldBe` Just expected - - it "field comments" $ do - let text = T.unlines - [ "-- | Model" - , "Foo" - , " -- | Field" - , " name String" - ] - expected = - Line { lineIndent = 0, tokens = [DocComment "Model"] } :| - [ Line { lineIndent = 0, tokens = [Token "Foo"] } - , Line { lineIndent = 2, tokens = [DocComment "Field"] } - , Line { lineIndent = 2, tokens = (Token <$> ["name", "String"]) } - ] - preparse text `shouldBe` Just expected - - describe "associateLines" $ do - let foo = - Line - { lineIndent = 0 - , tokens = pure (Token "Foo") - } - name'String = - Line - { lineIndent = 2 - , tokens = Token "name" :| [Token "String"] - } - comment = - Line - { lineIndent = 0 - , tokens = pure (DocComment "comment") - } - it "works" $ do - associateLines - ( comment :| - [ foo - , name'String - ]) - `shouldBe` - [ LinesWithComments - { lwcComments = ["comment"] - , lwcLines = foo :| [name'String] - } - ] - let bar = - Line - { lineIndent = 0 - , tokens = Token "Bar" :| 
[Token "sql", Token "=", Token "bars"] - } - age'Int = - Line - { lineIndent = 1 - , tokens = Token "age" :| [Token "Int"] - } - it "works when used consecutively" $ do - associateLines - ( bar :| - [ age'Int - , comment - , foo - , name'String - ]) - `shouldBe` - [ LinesWithComments - { lwcComments = [] - , lwcLines = bar :| [age'Int] - } - , LinesWithComments - { lwcComments = ["comment"] - , lwcLines = foo :| [name'String] - } - ] - it "works with textual input" $ do - let text = preparse "Foo\n x X\n-- | Hello\nBar\n name String" - associateLines <$> text - `shouldBe` Just - [ LinesWithComments - { lwcLines = - Line {lineIndent = 0, tokens = Token "Foo" :| []} - :| [ Line {lineIndent = 2, tokens = Token "x" :| [Token "X"]} ] - , lwcComments = - [] - } - , LinesWithComments - { lwcLines = - Line {lineIndent = 0, tokens = Token "Bar" :| []} - :| [ Line {lineIndent = 1, tokens = Token "name" :| [Token "String"]}] - , lwcComments = - ["Hello"] - } - ] - it "works with extra blocks" $ do - let text = preparse . T.unlines $ - [ "LowerCaseTable" - , " Id sql=my_id" - , " fullName Text" - , " ExtraBlock" - , " foo bar" - , " baz" - , " bin" - , " ExtraBlock2" - , " something" - ] - associateLines <$> text `shouldBe` Just - [ LinesWithComments - { lwcLines = - Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| - [ Line { lineIndent = 4, tokens = Token "Id" :| [Token "sql=my_id"] } - , Line { lineIndent = 4, tokens = Token "fullName" :| [Token "Text"] } - , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock") } - , Line { lineIndent = 8, tokens = Token "foo" :| [Token "bar"] } - , Line { lineIndent = 8, tokens = pure (Token "baz") } - , Line { lineIndent = 8, tokens = pure (Token "bin") } - , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock2") } - , Line { lineIndent = 8, tokens = pure (Token "something") } - ] - , lwcComments = [] - } - ] - - it "works with extra blocks twice" $ do - let text = preparse . 
T.unlines $ - [ "IdTable" - , " Id Day default=CURRENT_DATE" - , " name Text" - , "" - , "LowerCaseTable" - , " Id sql=my_id" - , " fullName Text" - , " ExtraBlock" - , " foo bar" - , " baz" - , " bin" - , " ExtraBlock2" - , " something" - ] - associateLines <$> text `shouldBe` Just - [ LinesWithComments - { lwcLines = Line 0 (pure (Token "IdTable")) :| - [ Line 4 (Token "Id" <| Token "Day" :| [Token "default=CURRENT_DATE"]) - , Line 4 (Token "name" :| [Token "Text"]) - ] - , lwcComments = [] - } - , LinesWithComments - { lwcLines = - Line { lineIndent = 0, tokens = pure (Token "LowerCaseTable") } :| - [ Line { lineIndent = 4, tokens = Token "Id" :| [Token "sql=my_id"] } - , Line { lineIndent = 4, tokens = Token "fullName" :| [Token "Text"] } - , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock") } - , Line { lineIndent = 8, tokens = Token "foo" :| [Token "bar"] } - , Line { lineIndent = 8, tokens = pure (Token "baz") } - , Line { lineIndent = 8, tokens = pure (Token "bin") } - , Line { lineIndent = 4, tokens = pure (Token "ExtraBlock2") } - , Line { lineIndent = 8, tokens = pure (Token "something") } - ] - , lwcComments = [] - } - ] - - - it "works with field comments" $ do - let text = preparse . 
T.unlines $ - [ "-- | Model" - , "Foo" - , " -- | Field" - , " name String" - ] - associateLines <$> text `shouldBe` Just - [ LinesWithComments - { lwcLines = - Line { lineIndent = 0, tokens = (Token "Foo") :| [] } :| - [ Line { lineIndent = 2, tokens = pure (DocComment "Field") } - , Line { lineIndent = 2, tokens = Token "name" :| [Token "String"] } - ] - , lwcComments = - ["Model"] - } - ] - - - - describe "parseLines" $ do - let lines = - T.unlines - [ "-- | Comment" - , "Foo" - , " -- | Field" - , " name String" - , " age Int" - , " Extra" - , " foo bar" - , " baz" - , " Extra2" - , " something" - ] - let [subject] = parse lowerCaseSettings lines - it "produces the right name" $ do - entityHaskell subject `shouldBe` EntityNameHS "Foo" - describe "entityFields" $ do - let fields = entityFields subject - it "has the right field names" $ do - map fieldHaskell fields `shouldMatchList` - [ FieldNameHS "name" - , FieldNameHS "age" - ] - it "has comments" $ do - map fieldComments fields `shouldBe` - [ Just "Field\n" - , Nothing - ] - it "has the comments" $ do - entityComments subject `shouldBe` - Just "Comment\n" - it "combines extrablocks" $ do - entityExtra subject `shouldBe` Map.fromList - [ ("Extra", [["foo", "bar"], ["baz"]]) - , ("Extra2", [["something"]]) - ] - describe "works with extra blocks" $ do - let [_, lowerCaseTable, idTable] = - case parse lowerCaseSettings $ T.unlines - [ "" - , "IdTable" - , " Id Day default=CURRENT_DATE" - , " name Text" - , "" - , "LowerCaseTable" - , " Id sql=my_id" - , " fullName Text" - , " ExtraBlock" - , " foo bar" - , " baz" - , " bin" - , " ExtraBlock2" - , " something" - , "" - , "IdTable" - , " Id Day default=CURRENT_DATE" - , " name Text" - , "" - ] of - [a, b, c] -> - [a, b, c] :: [EntityDef] - xs -> - error - $ "Expected 3 elements in list, got: " - <> show (length xs) - <> ", list contents: \n\n" <> intercalate "\n" (map show xs) - describe "idTable" $ do - let EntityDef {..} = idTable - it "has no extra blocks" $ do 
- entityExtra `shouldBe` mempty - it "has the right name" $ do - entityHaskell `shouldBe` EntityNameHS "IdTable" - it "has the right fields" $ do - map fieldHaskell entityFields `shouldMatchList` - [ FieldNameHS "name" - ] - describe "lowerCaseTable" $ do - let EntityDef {..} = lowerCaseTable - it "has the right name" $ do - entityHaskell `shouldBe` EntityNameHS "LowerCaseTable" - it "has the right fields" $ do - map fieldHaskell entityFields `shouldMatchList` - [ FieldNameHS "fullName" - ] - it "has ExtraBlock" $ do - Map.lookup "ExtraBlock" entityExtra - `shouldBe` Just - [ ["foo", "bar"] - , ["baz"] - , ["bin"] - ] - it "has ExtraBlock2" $ do - Map.lookup "ExtraBlock2" entityExtra - `shouldBe` Just - [ ["something"] - ] - - describe "fromPersistValue" $ - describe "UTCTime" $ - it "works with format" $ - fromPersistValue (PersistText "2018-02-27 10:49:42.123") - `shouldBe` Right (UTCTime (fromGregorian 2018 02 27) (timeOfDayToTime (TimeOfDay 10 49 42.123))) - - describe "PersistValue" $ do - describe "Aeson" $ do - let - testPrefix constr prefixChar bytes = - takePrefix (toJSON (constr (BS8.pack bytes))) - === - String (T.singleton prefixChar) - roundTrip constr bytes = - fromJSON (toJSON (constr (BS8.pack bytes))) - === - Data.Aeson.Success (constr (BS8.pack bytes)) - subject constr prefixChar = do - prop ("encodes with a " ++ [prefixChar] ++ " prefix") $ - testPrefix constr prefixChar - prop "Round Trips" $ - roundTrip constr - - describe "PersistDbSpecific" $ do - subject PersistDbSpecific 'p' - describe "PersistLiteral" $ do - subject PersistLiteral 'l' - describe "PersistLiteralEscaped" $ do - subject PersistLiteralEscaped 'e' - -takePrefix :: Value -> Value -takePrefix (String a) = String (T.take 1 a) -takePrefix a = a - -arbitraryWhiteSpaceChar :: Gen Char -arbitraryWhiteSpaceChar = - oneof $ pure <$> [' ', '\t', '\n', '\r'] + describe "Database" $ describe "Persist" $ do + THSpec.spec + QuasiSpec.spec + ClassSpec.spec + PersistValueSpec.spec diff --git 
a/stack-8.10.yaml b/stack-8.10.yaml new file mode 100644 index 000000000..4821c74d8 --- /dev/null +++ b/stack-8.10.yaml @@ -0,0 +1,10 @@ +resolver: lts-17.8 +packages: + - ./persistent + - ./persistent-sqlite + - ./persistent-test + - ./persistent-mongoDB + - ./persistent-mysql + - ./persistent-postgresql + - ./persistent-redis + - ./persistent-qq diff --git a/stack-8.10.yaml.lock b/stack-8.10.yaml.lock new file mode 100644 index 000000000..72d7dad32 --- /dev/null +++ b/stack-8.10.yaml.lock @@ -0,0 +1,12 @@ +# This file was autogenerated by Stack. +# You should not edit this file by hand. +# For more information, please see the documentation at: +# https://docs.haskellstack.org/en/stable/lock_files + +packages: [] +snapshots: +- completed: + size: 565720 + url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/17/8.yaml + sha256: 76bf8992ff8dfe6eda9c02f81866138c2369344d5011ab39ae403457c4448b03 + original: lts-17.8 diff --git a/stack.yaml b/stack.yaml index c548c33cf..e5a1c6382 100644 --- a/stack.yaml +++ b/stack.yaml @@ -3,8 +3,11 @@ packages: - ./persistent - ./persistent-sqlite - ./persistent-test - - ./persistent-mongoDB + # - ./persistent-mongoDB - ./persistent-mysql - ./persistent-postgresql - ./persistent-redis - ./persistent-qq + +extra-deps: + - lift-type-0.1.0.0 diff --git a/stack_lts-12.yaml b/stack_lts-12.yaml index 7263f4c8e..8246ca6f0 100644 --- a/stack_lts-12.yaml +++ b/stack_lts-12.yaml @@ -14,3 +14,4 @@ extra-deps: - postgresql-simple-0.6.1 - th-lift-0.8.0.1 - th-lift-instances-0.1.14 +- lift-type-0.1.0.1