Commit 9a2f05e0 authored by Alfredo Di Napoli

Move panicTrace to gargantext-prelude

parent 8e43cc9e
Pipeline #5424 canceled with stages
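The change below is largely mechanical: call sites of panic (and a few of error) become panicTrace/errorTrace, the local Gargantext.Core.Errors.Types module is deleted from this repository, and its definitions are presumably re-exported via Gargantext.Prelude from the bumped haskell-gargantext-prelude dependency (hence the new tag in cabal.project and the updated cabal.project hash). A rough before/after sketch of the pattern, taken from one of the hunks below and assuming the moved panicTrace keeps the shape of the deleted module shown further down (a HasCallStack-constrained drop-in for panic):

  -- before: the failure message carries no source location
  limit' = case (readMaybe limit :: Maybe Limit) of
    Nothing -> panic $ "Cannot read limit: " <> (Text.pack limit)
    Just l  -> l

  -- after: panicTrace captures the CallStack of this call site, so the
  -- thrown exception can be rendered together with file/line information
  limit' = case (readMaybe limit :: Maybe Limit) of
    Nothing -> panicTrace $ "Cannot read limit: " <> (Text.pack limit)
    Just l  -> l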
@@ -52,4 +52,4 @@ main = do
  putStrLn ("Mean size of docs:" <> show (CSV.docsSize docs') :: Text)
  CSV.writeFile wPath (h, docs')
- Left e -> panic $ "Error: " <> e
+ Left e -> panicTrace $ "Error: " <> e
@@ -95,7 +95,7 @@ main = do
  -- r <- mapConcurrentlyChunked (filterTermsAndCooc patterns) (DM.toList corpus)
  r <- mapConcurrently (filterTermsAndCooc patterns) (DM.toList corpus)
  writeFile outputFile $ DTL.toStrict $ TLE.decodeUtf8 $ encode (CoocByYears r)
- Left e -> panic $ "Error: " <> e
+ Left e -> panicTrace $ "Error: " <> e
...
@@ -34,7 +34,8 @@ module Main where
  import Data.Text qualified as T
  import Database.PostgreSQL.Simple qualified as PSQL
  import Database.PostgreSQL.Simple.SqlQQ (sql)
- import Gargantext.Database.Admin.Config (nodeTypeId)
+ import Gargantext.Core (toDBid)
+ import Gargantext.Database.Admin.Config ()
  import Gargantext.Database.Admin.Types.Node (NodeType(..))
  import Gargantext.Prelude hiding (option)
  import Gargantext.Prelude.Database (runPGSExecute, runPGSQuery)
@@ -99,7 +100,7 @@ main = do
  obfuscateNotes :: PSQL.Connection -> IO ()
  obfuscateNotes c = do
- let nt = nodeTypeId Notes
+ let nt = toDBid Notes
  _ <- runPGSExecute c [sql|UPDATE nodes SET name = concat('notes-', id) WHERE typename = ?;|] (PSQL.Only nt)
...
@@ -43,7 +43,7 @@ main = do
  tt = (Multi EN)
  format = CsvGargV3 -- CsvHal --WOS
  limit' = case (readMaybe limit :: Maybe Limit) of
- Nothing -> panic $ "Cannot read limit: " <> (Text.pack limit)
+ Nothing -> panicTrace $ "Cannot read limit: " <> (Text.pack limit)
  Just l -> l
  corpus :: forall m. (FlowCmdM DevEnv BackendInternalError m, MonadJobStatus m, JobHandle m ~ DevJobHandle) => m CorpusId
  corpus = flowCorpusFile (UserName $ cs user) (Left (cs name :: Text)) limit' tt format Plain corpusPath Nothing DevJobHandle
...
@@ -37,7 +37,7 @@ main = do
  params@[iniPath] <- getArgs
  _ <- if length params /= 1
- then panic "USAGE: ./gargantext-init gargantext.ini"
+ then panicTrace "USAGE: ./gargantext-init gargantext.ini"
  else pure ()
  putStrLn ("Enter master user (gargantua) _password_ :" :: Text)
...
@@ -31,7 +31,7 @@ main = do
  params@[iniPath,user,node_id,email] <- getArgs
  _ <- if length params /= 4
- then panic "USAGE: ./gargantext-init gargantext.ini username node_id student@university.edu"
+ then panicTrace "USAGE: ./gargantext-init gargantext.ini username node_id student@university.edu"
  else pure ()
  _cfg <- readConfig iniPath
...
@@ -107,7 +107,7 @@ csvToDocs parser patterns time path =
  Nothing
  []
  time
- ) <$> snd <$> either (\err -> panic $ "CSV error" <> (show err)) identity <$> Csv.readCSVFile path
+ ) <$> snd <$> either (\err -> panicTrace $ "CSV error" <> (show err)) identity <$> Csv.readCSVFile path
  Csv' limit -> Vector.toList
  <$> Vector.take limit
  <$> Vector.map (\row -> Document (toPhyloDate (csv'_publication_year row) (csv'_publication_month row) (csv'_publication_day row) time)
@@ -139,7 +139,7 @@ fileToDocsDefault parser path timeUnits lst =
  if (length periods < 3)
  then fileToDocsDefault parser path (tail timeUnits) lst
  else pure docs
- else panic "this corpus is incompatible with the phylomemy reconstruction"
+ else panicTrace "this corpus is incompatible with the phylomemy reconstruction"
  -- on passe à passer la time unit dans la conf envoyé au phyloMaker
  -- dans le phyloMaker si default est true alors dans le setDefault ou pense à utiliser la TimeUnit de la conf
...
@@ -74,12 +74,12 @@ main = withLogger () $ \ioLogger -> do
  Nothing -> 8008
  myIniFile' = case myIniFile of
- Nothing -> panic "[ERROR] gargantext.ini needed"
+ Nothing -> panicTrace "[ERROR] gargantext.ini needed"
  Just i -> i
  ---------------------------------------------------------------
  let start = case myMode of
- Mock -> panic "[ERROR] Mock mode unsupported"
+ Mock -> panicTrace "[ERROR] Mock mode unsupported"
  _ -> startGargantext myMode myPort' (unpack myIniFile')
  logMsg ioLogger INFO $ "Starting with " <> show myMode <> " mode."
  logMsg ioLogger INFO $ "Machine locale: " <> show currentLocale
...
@@ -36,7 +36,7 @@ main = do
  params@[iniPath] <- getArgs
  _ <- if length params /= 1
- then panic "Usage: ./gargantext-upgrade gargantext.ini"
+ then panicTrace "Usage: ./gargantext-upgrade gargantext.ini"
  else pure ()
  putStrLn $ List.unlines
...
@@ -11,7 +11,7 @@ STORE_DIR="${1:-$DEFAULT_STORE}"
  # `expected_cabal_project_freeze_hash` with the
  # `sha256sum` result calculated on the `cabal.project` and `cabal.project.freeze`.
  # This ensures the files stay deterministic so that CI cache can kick in.
- expected_cabal_project_hash="f2efe6832145c093cfe16832b17b06cd4e2d94e85dd0390e713c46c40ee9e461"
+ expected_cabal_project_hash="69e03370a602f40243373515ff884a2cd50dc02eb6f52cd23ba9016a61fe8069"
  expected_cabal_project_freeze_hash="796f0109611f3381278b1885ae1fa257c4177b99885eb04701938f1107c06ee5"
  cabal --store-dir=$STORE_DIR v2-update 'hackage.haskell.org,2023-11-23T20:05:40Z'
...
@@ -119,7 +119,7 @@ source-repository-package
  source-repository-package
  type: git
  location: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude
- tag: fec7427ba8d1047fd68207afb79139f9dea339e0
+ tag: 5a8dc3a0a1a4774ec2eb9df5f0f0b0a7dd172f09
  source-repository-package
  type: git
...
@@ -75,7 +75,6 @@ library
  Gargantext.API.Prelude
  Gargantext.API.Routes
  Gargantext.Core
- Gargantext.Core.Errors.Types
  Gargantext.Core.Mail.Types
  Gargantext.Core.Methods.Similarities
  Gargantext.Core.NLP
...
@@ -78,7 +78,7 @@ startGargantext mode port file = withLoggerHoisted mode $ \logger -> do
  (\(_ :: SomeException) -> pure $ Right False)
  case r of
  Right True -> pure ()
- _ -> panic $
+ _ -> panicTrace $
  "You must run 'gargantext-init " <> pack file <>
  "' before running gargantext-server (only the first time)."
...
@@ -63,7 +63,7 @@ pipeline :: FromJSON e => URL -> ClientEnv -> ScraperInput
  -> (e -> IO ()) -> IO JobLog
  pipeline scrapyurl client_env input log_status = do
  e <- runJobMLog client_env log_status $ callScraper scrapyurl input
- either (panic . show) pure e -- TODO throwError
+ either (panicTrace . show) pure e -- TODO throwError
  -- TODO integrate to ServerT
  -- use:
...
@@ -23,7 +23,7 @@ import Test.QuickCheck.Arbitrary
  ------------------------------------------------------------------------
  instance Arbitrary a => Arbitrary (JobStatus 'Safe a) where
- arbitrary = panic "TODO"
+ arbitrary = panicTrace "TODO"
  instance Arbitrary a => Arbitrary (JobOutput a) where
  arbitrary = JobOutput <$> arbitrary
...
@@ -63,7 +63,7 @@ devSettings jwkFile = do
  , _logLevelLimit = LevelDebug
  -- , _dbServer = "localhost"
  , _sendLoginEmails = LogEmailToConsole
- , _scrapydUrl = fromMaybe (panic "Invalid scrapy URL") $ parseBaseUrl "http://localhost:6800"
+ , _scrapydUrl = fromMaybe (panicTrace "Invalid scrapy URL") $ parseBaseUrl "http://localhost:6800"
  , _cookieSettings = defaultCookieSettings { cookieXsrfSetting = Just xsrfCookieSetting } -- TODO-SECURITY tune
  , _jwtSettings = defaultJWTSettings jwk -- TODO-SECURITY tune
  }
@@ -177,7 +177,7 @@ newEnv logger port file = do
  !manager_env <- newTlsManager
  !settings' <- devSettings devJwkFile <&> appPort .~ port -- TODO read from 'file'
  when (port /= settings' ^. appPort) $
- panic "TODO: conflicting settings of port"
+ panicTrace "TODO: conflicting settings of port"
  !config_env <- readConfig file
  prios <- withLogger () $ \ioLogger -> Jobs.readPrios ioLogger (file <> ".jobs")
...
@@ -29,4 +29,4 @@ infixr 4 ?!
  -- Reverse infix form of "fromJust" with a custom error message
  (?!) :: Maybe a -> String -> a
- (?!) ma' msg = ma' ?| panic (T.pack msg)
+ (?!) ma' msg = ma' ?| panicTrace (T.pack msg)
@@ -179,4 +179,4 @@ api
  :: (Typeable env, CmdCommon env, HasJobEnv' env, HasSettings env)
  => ServerT API (GargM env BackendInternalError)
  api (SAS.Authenticated auser) = (httpPubApp [] . app auser) :<|> pure httpPlayground
- api _ = panic "401 in graphql" -- SAS.throwAll (_ServerError # err401)
+ api _ = panicTrace "401 in graphql" -- SAS.throwAll (_ServerError # err401)
@@ -107,7 +107,7 @@ updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
  lift $ printDebug "[updateUserInfo] ui_id" ui_id
  users <- lift (getUsersWithNodeHyperdata ui_id)
  case users of
- [] -> panic $ "[updateUserInfo] User with id " <> (T.pack $ show ui_id) <> " doesn't exist."
+ [] -> panicTrace $ "[updateUserInfo] User with id " <> (T.pack $ show ui_id) <> " doesn't exist."
  ((_u, node_u):_) -> do
  let u_hyperdata = node_u ^. node_hyperdata
  -- lift $ printDebug "[updateUserInfo] u" u
...
@@ -73,7 +73,7 @@ dbTeam nodeId = do
  shared_folder_id = unNodeId fId
  }
  uId Node { _node_user_id } = _node_user_id
- getUsername [] = panic "[resolveTeam] Team creator doesn't exist"
+ getUsername [] = panicTrace "[resolveTeam] Team creator doesn't exist"
  getUsername ((UserLight {userLight_username}, _):_) = userLight_username
  -- TODO: list as argument
@@ -83,11 +83,11 @@ deleteTeamMembership TeamDeleteMArgs { token, shared_folder_id, team_node_id } =
  teamNode <- lift $ getNode $ UnsafeMkNodeId team_node_id
  userNodes <- lift (getUsersWithNodeHyperdata $ Individu.UserDBId $ uId teamNode)
  case userNodes of
- [] -> panic $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
+ [] -> panicTrace $ "[deleteTeamMembership] User with id " <> T.pack (show $ uId teamNode) <> " doesn't exist."
  (( _, node_u):_) -> do
  testAuthUser <- lift $ authUser (nId node_u) token
  case testAuthUser of
- Invalid -> panic "[deleteTeamMembership] failed to validate user"
+ Invalid -> panicTrace "[deleteTeamMembership] failed to validate user"
  Valid -> do
  lift $ deleteMemberShip [(UnsafeMkNodeId shared_folder_id, UnsafeMkNodeId team_node_id)]
  where
...
@@ -124,11 +124,11 @@ updateUserInfo (UserInfoMArgs { ui_id, .. }) = do
  -- lift $ printDebug "[updateUserInfo] ui_id" ui_id
  users <- lift (getUsersWithNodeHyperdata (Individu.UserDBId $ UnsafeMkUserId ui_id))
  case users of
- [] -> panic $ "[updateUserInfo] User with id " <> (T.pack $ show ui_id) <> " doesn't exist."
+ [] -> panicTrace $ "[updateUserInfo] User with id " <> (T.pack $ show ui_id) <> " doesn't exist."
  ((UserLight { .. }, node_u):_) -> do
  testAuthUser <- lift $ authUser (nId node_u) token
  case testAuthUser of
- Invalid -> panic "[updateUserInfo] failed to validate user"
+ Invalid -> panicTrace "[updateUserInfo] failed to validate user"
  Valid -> do
  let u_hyperdata = node_u ^. node_hyperdata
  -- lift $ printDebug "[updateUserInfo] u" u
...
@@ -119,7 +119,7 @@ updateScatter' cId listId tabType maybeLimit = do
  , m_y = s2
  , m_cat = listType t ngs' })
  $ fmap normalizeLocal scores
- listType t m = maybe (panic errorMsg) fst $ HashMap.lookup t m
+ listType t m = maybe (panicTrace errorMsg) fst $ HashMap.lookup t m
  errorMsg = "API.Node.metrics: key absent"
  node <- getNodeWith listId (Proxy :: Proxy HyperdataList)
...
@@ -124,7 +124,6 @@ import Gargantext.Prelude hiding (log, to, toLower, (%))
  import Gargantext.Prelude.Clock (hasTime, getTime)
  import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
  import GHC.Conc (readTVar, writeTVar)
- import Prelude (error)
  import Servant hiding (Patch)
  {-
@@ -215,7 +214,7 @@ ngramsStatePatchConflictResolution _ngramsType _ngramsTerm
  -- they do not extend history,
  -- they do not bump version.
  insertNewOnly :: a -> Maybe b -> a
- insertNewOnly m = maybe m (const $ error "insertNewOnly: impossible")
+ insertNewOnly m = maybe m (const $ errorTrace "insertNewOnly: impossible")
  -- TODO error handling
  {- unused
...
@@ -129,7 +129,7 @@ postAsyncJSON l ngramsList jobHandle = do
  markProgress 1 jobHandle
  corpus_node <- getNode l -- (Proxy :: Proxy HyperdataList)
- let corpus_id = fromMaybe (panic "no parent_id") (_node_parent_id corpus_node)
+ let corpus_id = fromMaybe (panicTrace "no parent_id") (_node_parent_id corpus_node)
  _ <- reIndexWith corpus_id l NgramsTerms (Set.fromList [MapTerm, CandidateTerm])
  markComplete jobHandle
...
@@ -14,6 +14,7 @@ Portability : POSIX
  module Gargantext.API.Ngrams.Tools
  where
+ -- import Gargantext.Core.NodeStoryFile qualified as NSF
  import Control.Lens (_Just, (^.), at, view, At, Index, IxValue)
  import Control.Monad.Reader
  import Data.HashMap.Strict (HashMap)
@@ -21,13 +22,12 @@ import Data.HashMap.Strict qualified as HM
  import Data.Map.Strict qualified as Map
  import Data.Set qualified as Set
  import Data.Validity
+ import GHC.Conc (TVar, readTVar)
  import Gargantext.API.Ngrams.Types
  import Gargantext.Core.NodeStory
- -- import Gargantext.Core.NodeStoryFile qualified as NSF
  import Gargantext.Core.Types (ListType(..), NodeId, ListId)
  import Gargantext.Database.Schema.Ngrams (NgramsType)
  import Gargantext.Prelude
- import GHC.Conc (TVar, readTVar)
  mergeNgramsElement :: NgramsRepoElement -> NgramsRepoElement -> NgramsRepoElement
@@ -134,7 +134,7 @@ filterListWithRootHashMap lt m = snd <$> HM.filter isMapTerm m
  isMapTerm (l, maybeRoot) = case maybeRoot of
  Nothing -> l == lt
  Just r -> case HM.lookup r m of
- Nothing -> panic $ "[Garg.API.Ngrams.Tools] filterListWithRootHashMap, unknown key: " <> unNgramsTerm r
+ Nothing -> panicTrace $ "[Garg.API.Ngrams.Tools] filterListWithRootHashMap, unknown key: " <> unNgramsTerm r
  Just (l',_) -> l' == lt
  filterListWithRoot :: [ListType]
@@ -145,7 +145,7 @@ filterListWithRoot lt m = snd <$> HM.filter isMapTerm m
  isMapTerm (l, maybeRoot) = case maybeRoot of
  Nothing -> elem l lt
  Just r -> case HM.lookup r m of
- Nothing -> panic $ "[Garg.API.Ngrams.Tools] filterListWithRoot, unknown key: " <> unNgramsTerm r
+ Nothing -> panicTrace $ "[Garg.API.Ngrams.Tools] filterListWithRoot, unknown key: " <> unNgramsTerm r
  Just (l',_) -> elem l' lt
  groupNodesByNgrams :: ( Ord a
@@ -160,7 +160,7 @@ groupNodesByNgrams syn occs = HM.fromListWith (<>) occs'
  where
  occs' = map toSyn (HM.toList occs)
  toSyn (t,ns) = case syn ^. at t of
- Nothing -> panic $ "[Garg.API.Ngrams.Tools.groupNodesByNgrams] unknown key: " <> unNgramsTerm t
+ Nothing -> panicTrace $ "[Garg.API.Ngrams.Tools.groupNodesByNgrams] unknown key: " <> unNgramsTerm t
  Just r -> case r of
  Nothing -> (t, ns)
  Just r' -> (r',ns)
...
@@ -32,7 +32,7 @@ import Data.Patch.Class (Replace, replace, Action(act), Group, Applicable(..), C
  import Data.Set qualified as Set
  import Data.String (IsString(..))
  import Data.Swagger hiding (version, patch)
- import Data.Text (pack, strip, unpack)
+ import Data.Text (pack, strip)
  import Data.Validity
  import Database.PostgreSQL.Simple.FromField (FromField, fromField, fromJSONField)
  import Database.PostgreSQL.Simple.ToField (ToField, toJSONField, toField)
@@ -49,7 +49,6 @@ import Servant hiding (Patch)
  import Servant.Job.Utils (jsonOptions)
  import Test.QuickCheck (elements, frequency)
  import Test.QuickCheck.Arbitrary (Arbitrary, arbitrary)
- import Gargantext.Core.Errors.Types (panicTrace)
  ------------------------------------------------------------------------
@@ -792,7 +791,7 @@ ngramsTypeFromTabType tabType =
  Authors -> TableNgrams.Authors
  Institutes -> TableNgrams.Institutes
  Terms -> TableNgrams.NgramsTerms
- _ -> panicTrace $ unpack $ here <> "No Ngrams for this tab"
+ _ -> panicTrace $ here <> "No Ngrams for this tab"
  -- TODO: This `panic` would disapear with custom NgramsType.
  ----
...
@@ -101,7 +101,7 @@ type Roots = Get '[JSON] [Node HyperdataUser]
  -- | TODO: access by admin only
  roots :: GargServer Roots
  roots = getNodesWithParentId Nothing
- :<|> pure (panic "not implemented yet") -- TODO use patch map to update what we need
+ :<|> pure (panicTrace "not implemented yet") -- TODO use patch map to update what we need
  -------------------------------------------------------------------
  -- | Node API Types management
...
@@ -20,8 +20,6 @@ module Gargantext.API.Node.Corpus.New
  where
- -- import Servant.Multipart
- -- import Test.QuickCheck (elements)
  import Conduit
  import Control.Lens hiding (elements, Empty)
  import Data.Aeson
@@ -307,7 +305,7 @@ addToCorpusWithForm user cid nwf jobHandle = do
  let data' = case (nwf ^. wf_fileformat) of
  Plain -> cs (nwf ^. wf_data)
  ZIP -> case BSB64.decode $ TE.encodeUtf8 (nwf ^. wf_data) of
- Left err -> panic $ T.pack "[addToCorpusWithForm] error decoding base64: " <> T.pack err
+ Left err -> panicTrace $ T.pack "[addToCorpusWithForm] error decoding base64: " <> T.pack err
  Right decoded -> decoded
  eDocsC <- liftBase $ parseC (nwf ^. wf_fileformat) data'
  case eDocsC of
@@ -325,7 +323,7 @@ addToCorpusWithForm user cid nwf jobHandle = do
  , ")" ]
  let panicMsg = T.concat $ T.pack <$> panicMsg'
  --logStatus $ jobLogFailTotalWithMessage panicMsg jobLog
- panic panicMsg
+ panicTrace panicMsg
  else
  pure doc)
  .| mapC toHyperdataDocument
...
@@ -63,8 +63,8 @@ postUpload :: NodeId
  -> Maybe FileFormat
  -> MultipartData Mem
  -> Cmd err [Hash]
- postUpload _ Nothing _ _ = panic "fileType is a required parameter"
- postUpload _ _ Nothing _ = panic "fileFormat is a required parameter"
+ postUpload _ Nothing _ _ = panicTrace "fileType is a required parameter"
+ postUpload _ _ Nothing _ = panicTrace "fileFormat is a required parameter"
  postUpload _ (Just _fileType) (Just _fileFormat) multipartData = do
  -- printDebug "File Type: " fileType
  -- printDebug "File format: " fileFormat
...
@@ -30,7 +30,7 @@ instance FromHttpApiData FileType where
  parseUrlPiece "WOS" = pure WOS
  parseUrlPiece "Iramuteq" = pure Iramuteq
  parseUrlPiece "JSON" = pure JSON
- parseUrlPiece s = panic $ "[G.A.A.Node.Corpus.New] File Type not implemented (yet): " <> s
+ parseUrlPiece s = panicTrace $ "[G.A.A.Node.Corpus.New] File Type not implemented (yet): " <> s
  instance ToHttpApiData FileType where
  toUrlPiece = pack . show
...
@@ -11,7 +11,6 @@ Portability : POSIX
  module Gargantext.API.Node.Document.Export
  where
- -- import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
  import Control.Lens (view)
  import Data.Csv (encodeDefaultOrderedByName)
  import Data.Version (showVersion)
@@ -46,7 +45,7 @@ getDocumentsJSON :: NodeId
  getDocumentsJSON nodeUserId pId = do
  uId <- view node_user_id <$> getNodeUser nodeUserId
  mcId <- getClosestParentIdByType pId NodeCorpus
- let cId = maybe (panic "[G.A.N.D.Export] Node has no parent") identity mcId
+ let cId = maybe (panicTrace "[G.A.N.D.Export] Node has no parent") identity mcId
  docs <- runViewDocuments cId False Nothing Nothing Nothing Nothing Nothing
  pure $ addHeader (T.concat [ "attachment; filename=GarganText_DocsList-"
  , T.pack $ show pId
...
@@ -100,7 +100,7 @@ documentUpload nId doc = do
  mcId <- getClosestParentIdByType' nId NodeCorpus
  let cId = case mcId of
  Just c -> c
- Nothing -> panic $ T.pack $ "[G.A.N.DU] Node has no corpus parent: " <> show nId
+ Nothing -> panicTrace $ T.pack $ "[G.A.N.DU] Node has no corpus parent: " <> show nId
  let mDateS = Just $ view du_date doc
  let (theFullDate, (year, month, day)) = mDateSplit mDateS
...
@@ -46,7 +46,6 @@ import Gargantext.Database.Schema.Node (node_hyperdata, node_name, node_date)
  import Gargantext.Prelude
  import Gargantext.Utils.Jobs (serveJobsAPI, MonadJobStatus(..))
  import Servant
- -- import qualified Gargantext.Defaults as Defaults
  ------------------------------------------------------------------------
  type API = Summary " Documents from Write nodes."
@@ -94,7 +93,7 @@ documentsFromWriteNodes authenticatedUser nId Params { selection, lang, paragrap
  Nothing -> do
  let msg = T.pack $ "[G.A.N.DFWN] Node has no corpus parent: " <> show nId
  markFailed (Just msg) jobHandle
- panic msg
+ panicTrace msg
  frameWriteIds <- getChildrenByType nId Notes
...
@@ -71,7 +71,7 @@ instance GargDB.SaveFile NewWithFile where
  saveFile' fp (NewWithFile b64d _ _) = do
  let eDecoded = BSB64.decode $ encodeUtf8 b64d
  case eDecoded of
- Left err -> panic $ T.pack $ "Error decoding: " <> err
+ Left err -> panicTrace $ T.pack $ "Error decoding: " <> err
  Right decoded -> BS.writeFile fp decoded
  -- BS.writeFile fp $ BSB64.decodeLenient $ TE.encodeUtf8 b64d
...
@@ -112,8 +112,8 @@ updateNode nid1 (LinkNodeReq nt nid2) jobHandle = do
  _ <- case nt of
  NodeAnnuaire -> pairing nid2 nid1 Nothing -- defaultList
  NodeCorpus -> pairing nid1 nid2 Nothing -- defaultList
- _ -> panic $ "[G.API.N.Update.updateNode] NodeType not implemented"
+ _ -> panicTrace $ "[G.API.N.Update.updateNode] NodeType not implemented"
  <> show nt <> " nid1: " <> show nid1 <> " nid2: " <> show nid2
  markComplete jobHandle
@@ -154,7 +154,7 @@ updateNode phyloId (UpdateNodePhylo config) jobHandle = do
  corpusId' <- view node_parent_id <$> getNode phyloId
  markProgress 1 jobHandle
- let corpusId = fromMaybe (panic "") corpusId'
+ let corpusId = fromMaybe (panicTrace "no corpus id") corpusId'
  phy <- flowPhyloAPI (subConfigAPI2config config) corpusId
  markProgress 2 jobHandle
@@ -180,7 +180,7 @@ updateNode tId (UpdateNodeParamsTexts _mode) jobHandle = do
  _ <- case corpusId of
  Just cId -> updateDocs cId
  Nothing -> do
- _ <- panic "[G.A.N.Update] updateNode/UpdateNodeParamsText: no corpus Id given"
+ _ <- panicTrace "[G.A.N.Update] updateNode/UpdateNodeParamsText: no corpus Id given"
  pure ()
  markComplete jobHandle
...
@@ -29,7 +29,6 @@ import Gargantext.API.Public qualified as Public
  import Gargantext.API.Routes
  import Gargantext.API.Swagger (swaggerDoc)
  import Gargantext.API.ThrowAll (serverPrivateGargAPI)
- import Gargantext.Core.Errors.Types
  import Gargantext.Database.Prelude (hasConfig)
  import Gargantext.Prelude hiding (Handler, catch)
  import Gargantext.Prelude.Config (gc_url_backend_api)
...
@@ -142,7 +142,7 @@ postTableApi cId tq = case tq of
  $(logLocM) DEBUG $ "New search with query " <> getRawQuery q
  searchInCorpus' cId False q (Just o) (Just l) (Just order)
  Trash -> searchInCorpus' cId True q (Just o) (Just l) (Just order)
- x -> panic $ "not implemented in tableApi " <> (show x)
+ x -> panicTrace $ "not implemented in tableApi " <> (show x)
  getTableHashApi :: (CmdM env err m, HasNodeError err, MonadLogger m)
  => NodeId
@@ -204,7 +204,7 @@ getTable' cId ft o l order query year =
  (Just Trash) -> runViewDocuments cId True o l order query year
  (Just MoreFav) -> moreLike cId o l order IsFav
  (Just MoreTrash) -> moreLike cId o l order IsTrash
- x -> panic $ "not implemented in getTable: " <> (show x)
+ x -> panicTrace $ "not implemented in getTable: " <> (show x)
  getPair :: ContactId -> Maybe TabType
@@ -214,4 +214,4 @@ getPair cId ft o l order =
  case ft of
  (Just Docs) -> runViewAuthorsDoc cId False o l order
  (Just Trash) -> runViewAuthorsDoc cId True o l order
- _ -> panic $ "not implemented: get Pairing" <> (show ft)
+ _ -> panicTrace $ "not implemented: get Pairing" <> (show ft)
@@ -27,7 +27,6 @@ import Servant.API
  import Test.QuickCheck
  import Control.Exception (throw)
  import Prelude (userError)
- import Gargantext.Core.Errors.Types (WithStacktrace(..))
  ------------------------------------------------------------------------
  -- | Language of a Text
...
module Gargantext.Core.Errors.Types (
-- * Attaching callstacks to exceptions
WithStacktrace(..)
, UnexpectedPanic(..)
, withStacktrace
-- * Drop-in replacement for panic/error
, panicTrace
) where
import Control.Exception
import GHC.Stack
import Prelude
-- | A 'WithStacktrace' carries an error alongside its
-- 'CallStack', to be able to print the correct source location
-- of where the error originated.
data WithStacktrace e =
WithStacktrace {
ct_callStack :: !CallStack
, ct_error :: !e
} deriving Show
instance Exception e => Exception (WithStacktrace e) where
displayException WithStacktrace{..}
= displayException ct_error <> "\n" <> prettyCallStack ct_callStack
withStacktrace :: HasCallStack => e -> WithStacktrace e
withStacktrace = withFrozenCallStack . WithStacktrace callStack
newtype UnexpectedPanic = UnexpectedPanic String
deriving Show
instance Exception UnexpectedPanic
panicTrace :: HasCallStack => String -> x
panicTrace = throw . withFrozenCallStack . WithStacktrace callStack . UnexpectedPanic
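For reference, a minimal sketch (not part of the commit) of how the exception thrown by panicTrace surfaces at a catch site, using the definitions above; after this commit the same function would presumably be imported via Gargantext.Prelude instead, and the module and main wrapper here are purely illustrative:

  {-# LANGUAGE ScopedTypeVariables #-}
  module Main where

  import Control.Exception (SomeException, displayException, evaluate, try)
  import Gargantext.Core.Errors.Types (panicTrace)

  -- Hypothetical caller: panicTrace's HasCallStack constraint records this call site.
  brokenLookup :: Int
  brokenLookup = panicTrace "key absent"

  main :: IO ()
  main = do
    r <- try (evaluate brokenLookup)
    case r of
      Left (e :: SomeException) ->
        -- displayException on the WithStacktrace wrapper prints the wrapped
        -- UnexpectedPanic message followed by prettyCallStack output, i.e. the
        -- file and line where panicTrace was called.
        putStrLn (displayException e)
      Right n -> print n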
@@ -19,6 +19,7 @@ module Gargantext.Core.Ext.IMTUser -- (deserialiseImtUsersFromFile)
  import Codec.Serialise
  import Data.ByteString.Lazy qualified as BL
  import Data.Csv
+ import Data.Text qualified as T
  import Data.Vector (Vector)
  import Data.Vector qualified as Vector
  import Gargantext.Core.Text.Corpus.Parsers.CSV
@@ -29,9 +30,9 @@ import System.FilePath.Posix (takeExtension)
  ------------------------------------------------------------------------
  readFile_Annuaire :: FilePath -> IO [HyperdataContact]
  readFile_Annuaire fp = case takeExtension fp of
  ".csv" -> readCSVFile_Annuaire fp
  ".data" -> deserialiseImtUsersFromFile fp
- _ -> panic "[G.C.E.I.readFile_Annuaire] extension unknown"
+ unknownExt -> panicTrace $ "[G.C.E.I.readFile_Annuaire] extension unknown: " <> T.pack unknownExt
  ------------------------------------------------------------------------
  data IMTUser = IMTUser
@@ -119,7 +120,7 @@ readCSVFile_Annuaire' = fmap readCsvHalLazyBS' . BL.readFile
  where
  readCsvHalLazyBS' :: BL.ByteString -> (Header, Vector IMTUser)
  readCsvHalLazyBS' bs = case decodeByNameWith (csvDecodeOptions Tab) bs of
- Left e -> panic (cs e)
+ Left e -> panicTrace (cs e)
  Right rows -> rows
  ------------------------------------------------------------------------
...
@@ -114,7 +114,6 @@ import Database.PostgreSQL.Simple.Types (Values(..), QualifiedIdentifier(..))
  import GHC.Conc (TVar, newTVar, readTVar, writeTVar)
  import Gargantext.API.Ngrams.Types
  import Gargantext.Core (toDBid)
- import Gargantext.Core.Errors.Types (panicTrace)
  import Gargantext.Core.Types (ListId, NodeId(..), NodeType)
  import Gargantext.Core.Utils.Prefix (unPrefix)
  import Gargantext.Database.Admin.Config ()
...
@@ -26,7 +26,7 @@ risPress2csvWrite f = do
  eContents <- parseFile RisPresse Plain (f <> ".ris")
  case eContents of
  Right contents -> writeDocs2Csv (f <> ".csv") contents
- Left e -> panic $ "Error: " <> e
+ Left e -> panicTrace $ "Error: " <> e
@@ -30,7 +30,7 @@ import Servant.Client (ClientError)
  get :: Maybe ISO639.ISO639_1 -> Text -> Maybe Int -> IO [HyperdataDocument]
  get la q ml = do
  eDocs <- HAL.getMetadataWith [q] (Just 0) (fromIntegral <$> ml) la
- either (panic . pack . show) (\d -> mapM (toDoc' la) $ HAL._docs d) eDocs
+ either (panicTrace . pack . show) (\d -> mapM (toDoc' la) $ HAL._docs d) eDocs
  getC :: Maybe ISO639.ISO639_1 -> Text -> Maybe Int -> IO (Either ClientError (Maybe Integer, ConduitT () HyperdataDocument IO ()))
  getC la q ml = do
...
@@ -31,8 +31,8 @@ get :: Lang -> Maybe Isidore.Limit
  -> IO [HyperdataDocument]
  get la l q a = do
  let
- printErr (DecodeFailure e _) = panic e
- printErr e = panic (show e)
+ printErr (DecodeFailure e _) = panicTrace e
+ printErr e = panicTrace (show e)
  toIsidoreDocs :: Reply -> [IsidoreDoc]
  toIsidoreDocs (ReplyOnly r) = [r]
...
@@ -58,7 +58,7 @@ get la query' maxResults = do
  Right (ISTEX.Documents { _documents_hits }) -> printDebug "[Istex.get] length docs" $ length _documents_hits
  --ISTEX.getMetadataScrollProgress q ((\_ -> pack $ "1m") <$> ml) Nothing progress errorHandler
  case eDocs of
- Left err -> panic . Text.pack . show $ err
+ Left err -> panicTrace . Text.pack . show $ err
  Right docs -> toDoc' la docs
  --pure $ either (panic . pack . show) (toDoc' la) eDocs
  -- where
...
@@ -285,7 +285,7 @@ withParser RIS = RIS.parser
  withParser Iramuteq = Iramuteq.parser
  --withParser ODT = odtParser
  --withParser XML = xmlParser
- withParser _ = panic "[ERROR] Parser not implemented yet"
+ withParser _ = panicTrace "[ERROR] Parser not implemented yet"
  runParser :: FileType -> DB.ByteString
  -> IO (Either Text [[(DB.ByteString, DB.ByteString)]])
...
@@ -73,7 +73,7 @@ fileNameInfo fp = toFileInfo xs
  where
  xs = DT.splitOn "_" $ DT.pack fp
  toFileInfo (a:b:_) = FileInfo (DT.splitOn "-and-" a) (cs b)
- toFileInfo _ = panic "error"
+ toFileInfo _ = panicTrace "error"
  ---------------------------------------------------------------------
  publiToHyperdata :: Int -> Publi -> HyperdataDocument
...
@@ -496,6 +496,6 @@ readWeightedCsv :: FilePath -> IO (Header, Vector Csv')
  readWeightedCsv fp =
  fmap (\bs ->
  case decodeByNameWith (csvDecodeOptions Tab) bs of
- Left e -> panic (pack e)
+ Left e -> panicTrace (pack e)
  Right corpus -> corpus
  ) $ BL.readFile fp
@@ -44,7 +44,7 @@ type FilePathOut = FilePath
  json2csv :: FilePathIn -> FilePathOut -> IO ()
  json2csv fin fout = do
- patents <- maybe (panic "json2csv error") identity <$> readPatents fin
+ patents <- maybe (panicTrace "json2csv error") identity <$> readPatents fin
  writeFile fout (headerCsvGargV3, fromList $ map patent2csvDoc patents)
  patent2csvDoc :: Patent -> CsvDoc
...
@@ -114,7 +114,7 @@ unbound' l = map (map (unbound l))
  toWikiResult :: [Maybe Text] -> WikiResult
  toWikiResult (c:t:u:ys:ye:yf:_) = WikiResult c t u ys ye yf
- toWikiResult _ = panic "[G.C.T.C.Parsers.Wikidata.toWikiResult] error"
+ toWikiResult _ = panicTrace "[G.C.T.C.Parsers.Wikidata.toWikiResult] error"
  wikidataRoute :: EndPoint
  wikidataRoute = "https://query.wikidata.org/sparql"
...
@@ -88,7 +88,7 @@ fromCsvListFile :: FilePath -> IO (Header, Vector CsvList)
  fromCsvListFile fp = do
  csvData <- BL.readFile fp
  case decodeByNameWith csvDecodeOptions csvData of
- Left e -> panic (pack e)
+ Left e -> panicTrace (pack e)
  Right csvList -> pure csvList
  ------------------------------------------------------------------------
  toCsvListFile :: FilePath -> (Header, Vector CsvList) -> IO ()
...
@@ -83,7 +83,7 @@ type Param = Double
  grid :: (MonadBase IO m)
  => Param -> Param -> Train -> [Tests] -> m (Maybe Model)
- grid _ _ _ [] = panic "Gargantext.Core.Text.List.Learn.grid : empty test data"
+ grid _ _ _ [] = panicTrace "Gargantext.Core.Text.List.Learn.grid : empty test data"
  grid s e tr te = do
  let
  grid' :: (MonadBase IO m)
...
@@ -25,7 +25,6 @@ import Data.Vector qualified as V
  import GHC.Generics
  import Gargantext.API.Ngrams.Types (NgramsTerm, NgramsPatch)
  import Gargantext.Core.NodeStory (getNodesArchiveHistory)
- import Gargantext.Core.Errors.Types (panicTrace)
  import Gargantext.Core.Text.List.Social.Find (findListsId)
  import Gargantext.Core.Text.List.Social.Patch (addScorePatches)
  import Gargantext.Core.Text.List.Social.Prelude (FlowCont, FlowListScores)
...
@@ -47,11 +47,11 @@ tficf :: TficfContext Count Total
  tficf (TficfInfra (Count ic) (Total it) )
  (TficfSupra (Count sc) (Total st) )
  | it >= ic && st >= sc && it <= st = (it/ic) * log (st/sc)
- | otherwise = panic
+ | otherwise = panicTrace
  $ "[ERR]"
  <> path
  <> " Frequency impossible"
- tficf _ _ = panic $ "[ERR]" <> path <> "Undefined for these contexts"
+ tficf _ _ = panicTrace $ "[ERR]" <> path <> "Undefined for these contexts"
  sortTficf :: Ordering
...
@@ -46,7 +46,6 @@ import Data.Text qualified as Text
  import Data.Traversable
  import GHC.Base (String)
  import Gargantext.Core
- import Gargantext.Core.Errors.Types (panicTrace)
  import Gargantext.Core.Text (sentences, HasText(..))
  import Gargantext.Core.Text.Terms.Eleve (mainEleveWith, Tries, Token, buildTries, toToken)
  import Gargantext.Core.Text.Terms.Mono (monoTerms)
...
@@ -52,7 +52,7 @@ stem lang = DT.pack . N.stem lang' . DT.unpack
  lang' = case lang of
  EN -> N.English
  FR -> N.French
- _ -> panic $ DT.pack "not implemented yet"
+ _ -> panicTrace $ DT.pack "not implemented yet"
@@ -67,7 +67,7 @@ tokenTags (NLPServerConfig { server = Spacy, url }) l txt = do
  -- if txt == ""
  -- then pure [[]]
  -- else tokenTagsWith FR txt SpacyNLP.nlp
- tokenTags _ l _ = panic $ "[G.C.T.T.Multi] Lang NLP API not implemented yet " <> (show l)
+ tokenTags _ l _ = panicTrace $ "[G.C.T.T.Multi] Lang NLP API not implemented yet " <> (show l)
  tokenTagsWith :: Lang -> Text -> NLP_API -> IO [[TokenTag]]
  tokenTagsWith lang txt nlp = map (groupTokens lang)
...
@@ -30,7 +30,6 @@ import Gargantext.Core.Types (TermsCount)
  import Gargantext.Core.Utils (groupWithCounts)
  import Gargantext.Prelude hiding (concat)
  import GHC.Exts (sortWith)
- import Prelude (error)
  ------------------------------------------------------------------------
@@ -77,8 +76,8 @@ buildPatterns = sortWith (Down . _pat_length) . concatMap buildPattern
  where
  buildPattern (label, alts) = map f $ map (\alt -> filter (/= "") alt) (label : alts)
  where
- f alt | "" `elem` alt = error ("buildPatterns: ERR1" <> show(label))
- | null alt = error "buildPatterns: ERR2"
+ f alt | "" `elem` alt = errorTrace ("buildPatterns: ERR1" <> show(label))
+ | null alt = errorTrace "buildPatterns: ERR2"
  | otherwise =
  Pattern (KMP.build alt) (length alt) label
  --(Terms label $ Set.empty) -- TODO check stems
...
@@ -72,7 +72,7 @@ instance FromHttpApiData ListType where
  parseUrlPiece s = Right s'
  where
  s' = case (readMaybe $ unpack s) of
- Nothing -> panic $ "Cannot read url piece: " <> s
+ Nothing -> panicTrace $ "Cannot read url piece: " <> s
  Just s'' -> s''
  instance ToHttpApiData ListType where
  toUrlPiece = pack . show
...
@@ -242,7 +242,7 @@ mkObject gvid commonData objectTypeData =
  in case (commonDataJSON, objectTypeDataJSON, header) of
  (Object hdr, Object cdJSON, Object etDataJSON)
  -> Object $ hdr <> cdJSON <> etDataJSON
- _ -> panic "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
+ _ -> panicTrace "[Gargantext.Core.Types.Phylo.mkObject] impossible: commonData, header or objectTypeDataJSON didn't convert back to JSON Object."
  data GroupToNodeData
  = GroupToNodeData
@@ -491,7 +491,7 @@ mkGraphData GraphData{..} =
  datJSON = toJSON _gd_data
  in case (hdrJSON, datJSON) of
  (Object a, Object b) -> Object $ a <> b
- _ -> panic "[Gargantext.Core.Types.Phylo.mkGraphData] impossible: header or data didn't convert back to JSON Object."
+ _ -> panicTrace "[Gargantext.Core.Types.Phylo.mkGraphData] impossible: header or data didn't convert back to JSON Object."
  instance FromJSON GraphData where
  parseJSON = withObject "GraphData" $ \o -> do
@@ -529,7 +529,7 @@ mkEdge edgeType gvid commonData edgeTypeData =
  in case (commonDataJSON, edgeTypeDataJSON, header) of
  (Object hdr, Object cdJSON, Object etDataJSON)
  -> Object $ hdr <> cdJSON <> etDataJSON
- _ -> panic "[Gargantext.Core.Types.Phylo.mkEdge] impossible: commonData, header or edgeTypeDataJSON didn't convert back to JSON Object."
+ _ -> panicTrace "[Gargantext.Core.Types.Phylo.mkEdge] impossible: commonData, header or edgeTypeDataJSON didn't convert back to JSON Object."
  instance FromJSON EdgeData where
...
@@ -27,7 +27,6 @@ import Data.List qualified as List
  import Data.Maybe
  import Data.Monoid
  import Data.Text qualified as T
- import Gargantext.Core.Errors.Types
  import Gargantext.Core.Utils.Prefix
  import Gargantext.Prelude
  import Prelude ((!!))
...
@@ -19,7 +19,6 @@ import Data.ByteString.Lazy as DBL (readFile, writeFile)
  import Data.HashMap.Strict (HashMap, lookup)
  import Data.Text qualified as Text
  import Gargantext.API.Ngrams.Types (NgramsTerm(..), NgramsRepoElement(..), mSetToList)
- import Gargantext.Core.Errors.Types (panicTrace)
  import Gargantext.Core.Viz.Graph.Types
  import Gargantext.Database.Admin.Types.Hyperdata.Prelude
  import Gargantext.Database.Schema.Ngrams (NgramsType(..))
...
@@ -143,7 +143,7 @@ getPhyloDataJson phyloId = do
   let phyloData = fromMaybe phyloCleopatre maybePhyloData
   phyloJson <- liftBase $ phylo2dot2json phyloData
   case parseEither parseJSON phyloJson of
-    Left err -> panic $ T.pack $ "[Gargantext.Core.Viz.Phylo.API] getPhyloDataJson: " <> err
+    Left err -> panicTrace $ T.pack $ "[Gargantext.Core.Viz.Phylo.API] getPhyloDataJson: " <> err
     Right gd -> pure gd
@@ -173,7 +173,7 @@ postPhylo phyloId _lId = do
       -- _sft = Just (Software "Gargantext" "4")
       -- _prm = initPhyloParam vrs sft (Just q)
   corpusId <- getClosestParentIdByType phyloId NodeCorpus
-  phy <- flowPhyloAPI defaultConfig (fromMaybe (panic "[G.C.V.P.API] no corpus ID found") corpusId) -- params
+  phy <- flowPhyloAPI defaultConfig (fromMaybe (panicTrace "[G.C.V.P.API] no corpus ID found") corpusId) -- params
   -- phyloId <- insertNodes [node NodePhylo "Phylo" (HyperdataPhylo Nothing (Just phy)) (Just corpusId) userId]
   _ <- updateHyperdata phyloId (HyperdataPhylo Nothing (Just phy))
   pure phyloId
...
@@ -46,7 +46,7 @@ deleteNode :: (CmdCommon env, HasNodeError err)
 deleteNode u nodeId = do
   node' <- N.getNode nodeId
   case (view node_typename node') of
-    nt | nt == toDBid NodeUser -> panic "[G.D.A.D.deleteNode] Not allowed to delete NodeUser (yet)"
+    nt | nt == toDBid NodeUser -> panicTrace "[G.D.A.D.deleteNode] Not allowed to delete NodeUser (yet)"
     nt | nt == toDBid NodeTeam -> do
       uId <- getUserId u
       if _node_user_id node' == uId
...
@@ -264,7 +264,7 @@ flowCorpusFile u n _l la ft ff fp mfslw jobHandle = do
       flowCorpus u n la mfslw (fromIntegral $ length parsed, yieldMany parsed .| mapC toHyperdataDocument) jobHandle
       --let docs = splitEvery 500 $ take l parsed
       --flowCorpus u n la mfslw (yieldMany $ map (map toHyperdataDocument) docs) logStatus
-    Left e -> panic $ "Error: " <> e
+    Left e -> panicTrace $ "Error: " <> e
 ------------------------------------------------------------------------
 -- | TODO improve the needed type to create/update a corpus
@@ -510,7 +510,7 @@ viewUniqId' :: UniqId a
             -> (Hash, a)
 viewUniqId' d = maybe err (\h -> (h,d)) (view uniqId d)
   where
-    err = panic "[ERROR] Database.Flow.toInsert"
+    err = panicTrace "[ERROR] Database.Flow.toInsert"
 toInserted :: [ReturnId]
...
@@ -189,7 +189,7 @@ databaseParameters fp = do
   let val' key = unpack $ val ini "database" key
   let dbPortRaw = val' "DB_PORT"
   let dbPort = case (readMaybe dbPortRaw :: Maybe Word16) of
-        Nothing -> panic $ "DB_PORT incorrect: " <> (pack dbPortRaw)
+        Nothing -> panicTrace $ "DB_PORT incorrect: " <> (pack dbPortRaw)
         Just d -> d
   pure $ PGS.ConnectInfo { PGS.connectHost = val' "DB_HOST"
...
@@ -350,7 +350,7 @@ insertNodesWithParentR pid ns = insertNodesR (set node_parent_id (pgNodeId <$> p
 node2table :: HasDBid NodeType
            => UserId -> Maybe ParentId -> Node' -> NodeWrite
 node2table uid pid (Node' nt txt v []) = Node Nothing Nothing (sqlInt4 $ toDBid nt) (sqlInt4 $ _UserId uid) (fmap pgNodeId pid) (sqlStrictText txt) Nothing (sqlStrictJSONB $ cs $ encode v)
-node2table _ _ (Node' _ _ _ _) = panic "node2table: should not happen, Tree insert not implemented yet"
+node2table _ _ (Node' _ _ _ _) = panicTrace "node2table: should not happen, Tree insert not implemented yet"
 data Node' = Node' { _n_type :: NodeType
@@ -371,7 +371,7 @@ childWith :: HasDBid NodeType
           => UserId -> ParentId -> Node' -> NodeWrite
 childWith uId pId (Node' NodeDocument txt v []) = node2table uId (Just pId) (Node' NodeDocument txt v [])
 childWith uId pId (Node' NodeContact txt v []) = node2table uId (Just pId) (Node' NodeContact txt v [])
-childWith _ _ (Node' _ _ _ _) = panic "This NodeType can not be a child"
+childWith _ _ (Node' _ _ _ _) = panicTrace "This NodeType can not be a child"
 -- =================================================================== --
...
@@ -25,7 +25,7 @@ import Opaleye (limit)
 getNodeUser :: NodeId -> DBCmd err (Node HyperdataUser)
 getNodeUser nId = do
-  fromMaybe (panic $ "Node does not exist: " <> (show nId)) . headMay
+  fromMaybe (panicTrace $ "Node does not exist: " <> (show nId)) . headMay
     <$> runOpaQuery (limit 1 $ selectNode (pgNodeId nId))
 nodeUserW :: HasDBid NodeType => Maybe Name -> Maybe HyperdataUser -> UserId -> NodeWrite
...
@@ -221,7 +221,7 @@ instance Functor NgramsT where
 -----------------------------------------------------------------------
 withMap :: HashMap Text NgramsId -> Text -> NgramsId
-withMap m n = maybe (panic $ "[G.D.S.Ngrams.withMap] Should not happen" <> (show n))
+withMap m n = maybe (panicTrace $ "[G.D.S.Ngrams.withMap] Should not happen" <> (show n))
                     identity (HashMap.lookup n m)
 indexNgramsT :: HashMap Text NgramsId -> NgramsT Ngrams -> NgramsT (Indexed Int Ngrams)
...
@@ -21,7 +21,6 @@ import Data.List.Safe qualified as LS
 import Data.Map.Strict qualified as Map
 import Data.Text hiding (map, group, filter, concat, zip)
 import Gargantext.Core (Lang(..))
-import Gargantext.Core.Errors.Types
 import Gargantext.Core.Text.Terms.Multi.PosTagging.Types
 import Gargantext.Core.Types (POS(..))
 import Gargantext.Core.Utils.Prefix (unPrefix)
...
@@ -22,7 +22,7 @@ allow-newer: true
 extra-deps:
   - git: https://gitlab.iscpif.fr/gargantext/haskell-gargantext-prelude
-    commit: fec7427ba8d1047fd68207afb79139f9dea339e0
+    commit: 5a8dc3a0a1a4774ec2eb9df5f0f0b0a7dd172f09
   - git: https://gitlab.iscpif.fr/gargantext/iso639.git
     commit: eab929d106833ded8011a0d6705135e3fc506a9c
   - git: https://gitlab.iscpif.fr/gargantext/gargantext-graph.git
...
@@ -31,7 +31,6 @@ import Gargantext.Database.Schema.Ngrams
 import Gargantext.Prelude hiding (get)
 import Network.Wai.Handler.Warp qualified as Wai
 import Paths_gargantext (getDataFileName)
-import Prelude (error)
 import Test.API.Private (withValidLogin, protectedJSON, postJSONUrlEncoded, getJSON)
 import Test.API.Setup (withTestDBAndPort, setupEnvironment, mkUrl, createAliceAndBob)
 import Test.Database.Types
@@ -84,7 +83,7 @@ pollUntilFinished :: HasCallStack
 pollUntilFinished tkn port mkUrlPiece = go 60
   where
     go :: Int -> JobPollHandle -> WaiSession () JobPollHandle
-    go 0 h = error $ T.unpack $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
+    go 0 h = panicTrace $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
     go n h = case _jph_status h == "IsPending" || _jph_status h == "IsRunning" of
       True -> do
         liftIO $ threadDelay 1_000_000
@@ -92,7 +91,7 @@ pollUntilFinished tkn port mkUrlPiece = go 60
         go (n-1) h'
       False
         | _jph_status h == "IsFailure"
-        -> error $ T.unpack $ "JobPollHandle contains a failure: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
+        -> panicTrace $ "JobPollHandle contains a failure: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
         | otherwise
         -> pure h
...
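In the test helper above, `error $ T.unpack $ ...` becomes `panicTrace $ ...`, which keeps the message but presumably also records where the poll gave up, since `pollUntilFinished` already carries `HasCallStack`. A small self-contained illustration of how such a constraint lets the reported location reach back through helpers to the original call site; `boom` is a hypothetical stand-in, not the real `panicTrace`.

module CallStackDemo where

import GHC.Stack (HasCallStack, callStack, prettyCallStack)

-- Hypothetical stand-in: abort with a message plus the caller's stack.
boom :: HasCallStack => String -> a
boom msg = errorWithoutStackTrace (msg <> "\n" <> prettyCallStack callStack)

-- Because go also carries HasCallStack, the printed stack reaches back
-- through go to whoever called it (here, main).
go :: HasCallStack => Int -> IO ()
go 0 = boom "exhausted attempts"
go n = go (n - 1)

main :: IO ()
main = go 3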
@@ -3,8 +3,8 @@
 module Test.Offline.Errors (tests) where
 import Control.Exception
+import Gargantext.Prelude.Error
 import Gargantext.Core (fromDBid)
-import Gargantext.Core.Errors.Types
 import Gargantext.Database.Admin.Config ()
 import Gargantext.Database.Admin.Types.Node
 import Prelude
...
@@ -228,6 +228,7 @@ instance MonadJobStatus MyDummyMonad where
   type JobOutputType MyDummyMonad = JobLog
   type JobEventType MyDummyMonad = JobLog
+  noJobHandle _ = noJobHandle (Proxy :: Proxy (GargM Env BackendInternalError))
   getLatestJobStatus jId = MyDummyMonad (getLatestJobStatus jId)
   withTracer _ jh n = n jh
   markStarted n jh = MyDummyMonad (markStarted n jh)
...
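The one added line in this hunk re-dispatches `noJobHandle` through a `Proxy`, so the dummy test monad reuses another monad's job handle instead of defining its own. A stripped-down sketch of that pattern follows; the class and type names are invented for illustration and the real MonadJobStatus class has many more methods.

{-# LANGUAGE TypeFamilies #-}
module ProxyDispatchSketch where

import Data.Proxy (Proxy(..))

-- Invented, simplified stand-in for MonadJobStatus: the handle type is an
-- associated type family, and noHandle builds a "do nothing" handle for it.
class HasJobHandle m where
  type Handle m
  noHandle :: Proxy m -> Handle m

data RealM
data RealHandle = RealHandle deriving Show

instance HasJobHandle RealM where
  type Handle RealM = RealHandle
  noHandle _ = RealHandle

-- A dummy/test type that declares its handle to be RealM's handle, so it can
-- simply forward noHandle through a Proxy -- the same shape as the added line.
data DummyM

instance HasJobHandle DummyM where
  type Handle DummyM = Handle RealM
  noHandle _ = noHandle (Proxy :: Proxy RealM)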