[protolude] toUtf8 rewrite

parent c5fd09cc
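This commit replaces ad-hoc UTF-8 plumbing (TE.encodeUtf8, BSL.fromStrict . encodeUtf8, TE.decodeUtf8 and the corresponding qualified imports) with toUtf8, toUtf8Lazy and decodeUtf8 helpers that appear to come from the protolude-based Gargantext.Prelude. A minimal sketch of what such helpers look like is given below; it is an illustration only — the module name is invented, and the real protolude / Gargantext.Prelude versions appear to be more general, since the diff applies decodeUtf8 to both strict and lazy ByteStrings.

{-# LANGUAGE ImportQualifiedPost #-}
-- Sketch only: assumed shapes of the UTF-8 helpers this commit switches to.
module Utf8HelpersSketch where   -- hypothetical module name

import Data.ByteString (ByteString)
import Data.ByteString.Lazy qualified as BSL
import Data.Text (Text)
import Data.Text.Encoding qualified as TE
import Data.Text.Encoding.Error qualified as TEE

-- | Strict Text -> strict ByteString (replaces scattered TE.encodeUtf8 calls).
toUtf8 :: Text -> ByteString
toUtf8 = TE.encodeUtf8

-- | Strict Text -> lazy ByteString (replaces BSL.fromStrict . TE.encodeUtf8 pipelines).
toUtf8Lazy :: Text -> BSL.ByteString
toUtf8Lazy = BSL.fromStrict . TE.encodeUtf8

-- | ByteString -> Text, substituting U+FFFD for invalid bytes instead of throwing.
decodeUtf8 :: ByteString -> Text
decodeUtf8 = TE.decodeUtf8With TEE.lenientDecode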
...
@@ -94,7 +94,7 @@ main = do
       -- r <- mapConcurrentlyChunked (filterTermsAndCooc patterns) (DM.toList corpus)
       r <- mapConcurrently (filterTermsAndCooc patterns) (DM.toList corpus)
-      writeFile outputFile $ DTL.toStrict $ TLE.decodeUtf8 $ encode (CoocByYears r)
+      writeFile outputFile $ DTL.toStrict $ decodeUtf8 $ encode (CoocByYears r)
     Left e -> panic $ "Error: " <> e
...
@@ -137,8 +137,8 @@ fireWall req fw = do
   let origin = lookup "Origin" (requestHeaders req)
   let host = lookup "Host" (requestHeaders req)
-  if origin == Just (encodeUtf8 "http://localhost:8008")
-     && host == Just (encodeUtf8 "localhost:3000")
+  if origin == Just (toUtf8 ("http://localhost:8008" :: Text))
+     && host == Just (toUtf8 ("localhost:3000" :: Text))
      || (not $ unFireWall fw)
     then pure True
...
@@ -17,9 +17,11 @@ module Gargantext.API.Errors (
   , showAsServantJSONErr
   ) where

-import Prelude
 import Control.Exception
+import Data.Aeson qualified as JSON
+import Data.Text qualified as T
+import Data.Text.Lazy qualified as TL
+import Data.Text.Lazy.Encoding qualified as TE
 import Data.Validity ( prettyValidation )
 import Gargantext.API.Admin.Auth.Types
 import Gargantext.API.Errors.Class as Class
@@ -27,13 +29,10 @@ import Gargantext.API.Errors.TH (deriveHttpStatusCode)
 import Gargantext.API.Errors.Types as Types
 import Gargantext.Database.Query.Table.Node.Error hiding (nodeError)
 import Gargantext.Database.Query.Tree hiding (treeError)
+import Gargantext.Prelude
 import Gargantext.Utils.Jobs.Monad (JobError(..))
+import Network.HTTP.Types.Status qualified as HTTP
 import Servant.Server
-import qualified Data.Aeson as JSON
-import qualified Data.Text as T
-import qualified Network.HTTP.Types.Status as HTTP
-import qualified Data.Text.Lazy.Encoding as TE
-import qualified Data.Text.Lazy as TL

 $(deriveHttpStatusCode ''BackendErrorCode)
@@ -94,8 +93,8 @@ nodeErrorToFrontendError ne = case ne of
     -> mkFrontendErrShow FE_node_root_not_found
   NoCorpusFound
     -> mkFrontendErrShow FE_node_corpus_not_found
-  NoUserFound _ur
-    -> undefined
+  NoUserFound ur
+    -> mkFrontendErrShow $ FE_user_not_found ur
   NodeCreationFailed reason
     -> case reason of
       UserParentAlreadyExists pId uId
...
@@ -192,6 +192,10 @@ newtype instance ToFrontendErrorData 'EC_404__node_lookup_failed_not_found =
   FE_node_lookup_failed_not_found { nenf_node_id :: NodeId }
   deriving (Show, Eq, Generic)

+newtype instance ToFrontendErrorData 'EC_404__user_not_found =
+  FE_user_not_found { unf_user :: User }
+  deriving (Show, Eq, Generic)
+
 newtype instance ToFrontendErrorData 'EC_404__node_lookup_failed_user_not_found =
   FE_node_lookup_failed_user_not_found { nenf_user_id :: UserId }
   deriving (Show, Eq, Generic)
@@ -216,8 +220,8 @@ newtype instance ToFrontendErrorData 'EC_404__node_context_not_found =
 data instance ToFrontendErrorData 'EC_400__node_creation_failed_parent_exists =
   FE_node_creation_failed_parent_exists { necpe_parent_id :: ParentId
                                         , necpe_user_id :: UserId
                                         }
   deriving (Show, Eq, Generic)

 newtype instance ToFrontendErrorData 'EC_400__node_creation_failed_no_parent =
@@ -226,8 +230,8 @@ newtype instance ToFrontendErrorData 'EC_400__node_creation_failed_no_parent =
 data instance ToFrontendErrorData 'EC_400__node_creation_failed_insert_node =
   FE_node_creation_failed_insert_node { necin_user_id :: UserId
                                       , necin_parent_id :: ParentId
                                       }
   deriving (Show, Eq, Generic)

 newtype instance ToFrontendErrorData 'EC_500__node_generic_exception =
@@ -338,6 +342,14 @@ instance FromJSON (ToFrontendErrorData 'EC_500__node_not_implemented_yet) where
 instance ToJSON (ToFrontendErrorData 'EC_404__node_lookup_failed_not_found) where
   toJSON (FE_node_lookup_failed_not_found nodeId) = object [ "node_id" .= toJSON nodeId ]

+instance ToJSON (ToFrontendErrorData 'EC_404__user_not_found) where
+  toJSON (FE_user_not_found user) = object [ "user" .= toJSON user ]
+
+instance FromJSON (ToFrontendErrorData 'EC_404__user_not_found) where
+  parseJSON = withObject "FE_user_not_found" $ \o -> do
+    unf_user <- o .: "user"
+    pure FE_user_not_found{..}
+
 instance FromJSON (ToFrontendErrorData 'EC_404__node_lookup_failed_not_found) where
   parseJSON = withObject "FE_node_lookup_failed_not_found" $ \o -> do
     nenf_node_id <- o .: "node_id"
@@ -564,6 +576,9 @@ genFrontendErr be = do
       EC_404__node_lookup_failed_not_found
         -> do nodeId <- arbitrary
               pure $ mkFrontendErr' txt (FE_node_lookup_failed_not_found nodeId)
+      EC_404__user_not_found
+        -> do userId <- arbitrary
+              pure $ mkFrontendErr' txt (FE_user_not_found (UserDBId userId))
       EC_404__node_lookup_failed_user_not_found
         -> do userId <- arbitrary
               pure $ mkFrontendErr' txt (FE_node_lookup_failed_user_not_found userId)
@@ -672,6 +687,9 @@ instance FromJSON FrontendError where
       EC_404__node_lookup_failed_not_found -> do
         (fe_data :: ToFrontendErrorData 'EC_404__node_lookup_failed_not_found) <- o .: "data"
         pure FrontendError{..}
+      EC_404__user_not_found -> do
+        (fe_data :: ToFrontendErrorData 'EC_404__user_not_found) <- o .: "data"
+        pure FrontendError{..}
      EC_404__node_lookup_failed_user_not_found -> do
         (fe_data :: ToFrontendErrorData 'EC_404__node_lookup_failed_user_not_found) <- o .: "data"
         pure FrontendError{..}
...
@@ -20,6 +20,7 @@ data BackendErrorCode
   | EC_404__node_root_not_found
   | EC_404__node_lookup_failed_not_found
   | EC_400__node_lookup_failed_user_too_many_roots
+  | EC_404__user_not_found
   | EC_404__node_lookup_failed_user_not_found
   | EC_404__node_lookup_failed_username_not_found
   | EC_404__node_corpus_not_found
...
@@ -31,7 +31,7 @@ data AuthStatus = Valid | Invalid
 authUser :: (HasSettings env) => NodeId -> Text -> Cmd' env err AuthStatus
 authUser ui_id token = do
-  let token' = encodeUtf8 token
+  let token' = toUtf8 token
   jwtS <- view $ settings . jwtSettings
   u <- liftBase $ getUserFromToken jwtS token'
   case u of
...
@@ -18,7 +18,6 @@ Portability : POSIX
 module Gargantext.API.Ngrams.List
   where

-import Data.ByteString.Lazy qualified as BSL
 import Data.Csv qualified as Csv
 import Data.HashMap.Strict (HashMap)
 import Data.HashMap.Strict qualified as HashMap
@@ -176,7 +175,7 @@ ngramsListFromCSVData csvData = case decodeCsv of
   Left err -> Left $ "Invalid CSV found in ngramsListFromCSVData: " <> err
   Right terms -> pure $ Map.fromList [ (NgramsTerms, Versioned 0 $ mconcat . Vec.toList $ terms) ]
   where
-    binaryData = BSL.fromStrict $ P.encodeUtf8 csvData
+    binaryData = toUtf8Lazy csvData
     decodeCsv :: Either Prelude.String (Vector NgramsTableMap)
     decodeCsv = Csv.decodeWithP csvToNgramsTableMap
...
@@ -18,10 +18,8 @@ module Gargantext.API.Ngrams.List.Types where
 --import Control.Lens hiding (elements, Indexed)
 import Data.Aeson
-import qualified Data.ByteString.Lazy as BSL
 import Data.Swagger (ToSchema, declareNamedSchema, genericDeclareNamedSchema)
 import Data.Text
-import qualified Data.Text.Encoding as E
 import Servant.Job.Utils (jsonOptions)
 import Web.FormUrlEncoded (FromForm(..), ToForm, parseUnique)
@@ -58,8 +56,8 @@ data WithJsonFile = WithJsonFile
 instance FromForm WithJsonFile where
   fromForm f = do
-    d' <- parseUnique "_wjf_data" f
-    d <- case eitherDecode' (BSL.fromStrict $ E.encodeUtf8 d') of
+    d' <- parseUnique "_wjf_data" f :: Either Text Text
+    d <- case eitherDecode' (toUtf8Lazy d') of
       Left s -> Left $ pack s
       Right v -> Right v
     n <- parseUnique "_wjf_name" f
...
@@ -30,7 +30,6 @@ import Data.ByteString.Base64 qualified as BSB64
 import Data.Conduit.Internal (zipSources)
 import Data.Swagger
 import Data.Text qualified as T
-import Data.Text.Encoding qualified as TE
 import EPO.API.Client.Types qualified as EPO
 import Gargantext.API.Admin.Orchestrator.Types (JobLog(..), AsyncJobs)
 import Gargantext.API.Admin.Types (HasSettings)
@@ -306,7 +305,7 @@ addToCorpusWithForm user cid nwf jobHandle = do
   -- TODO granularity of the logStatus
   let data' = case (nwf ^. wf_fileformat) of
         Plain -> cs (nwf ^. wf_data)
-        ZIP -> case BSB64.decode $ TE.encodeUtf8 (nwf ^. wf_data) of
+        ZIP -> case BSB64.decode $ toUtf8 (nwf ^. wf_data) of
           Left err -> panic $ T.pack "[addToCorpusWithForm] error decoding base64: " <> T.pack err
           Right decoded -> decoded
   eDocsC <- liftBase $ parseC (nwf ^. wf_fileformat) data'
...
@@ -108,11 +108,11 @@ fetchSearxPage (FetchSearxParams { _fsp_language
   req <- parseRequest $ T.unpack _fsp_url
   let request = urlEncodedBody
         [ --("category_general", "1")
-          ("q", encodeUtf8 _fsp_query)
+          ("q", toUtf8 _fsp_query)
         , ("categories", "news") -- https://gitlab.iscpif.fr/gargantext/haskell-gargantext/issues/70#note_3976
-        , ("pageno", encodeUtf8 $ T.pack $ show _fsp_pageno)
+        , ("pageno", toUtf8 $ T.pack $ show _fsp_pageno)
           --, ("time_range", "None")
-        , ("language", encodeUtf8 $ langToSearx _fsp_language)
+        , ("language", toUtf8 $ langToSearx _fsp_language)
         , ("format", "json")
         ] req
   res <- httpLbs request _fsp_manager
...
@@ -16,15 +16,11 @@ module Gargantext.API.Node.Document.Export.Types where
 import Data.Aeson.TH (deriveJSON)
 import Data.Csv (DefaultOrdered(..), ToNamedRecord(..), (.=), header, namedRecord)
 import Data.Swagger
---import qualified Data.Text as T
-import qualified Data.Text.Encoding as TE
 import Gargantext.Core.Types
 import Gargantext.Core.Utils.Prefix (unPrefix, unPrefixSwagger)
 import Gargantext.Database.Admin.Types.Hyperdata (HyperdataDocument(..))
 import Gargantext.Database.Schema.Node (NodePoly(..))
---import Gargantext.Utils.Servant (CSV)
 import Protolude
---import Protolude.Partial (read)
 import Servant
@@ -58,8 +54,8 @@ instance ToNamedRecord Document where
     , "Publication Year" .= _hd_publication_year _node_hyperdata
     , "Authors" .= _hd_authors _node_hyperdata
     , "Title" .= _hd_title _node_hyperdata
-    , "Source" .= (TE.encodeUtf8 <$> _hd_source _node_hyperdata)
-    , "Abstract" .= (TE.encodeUtf8 <$> _hd_abstract _node_hyperdata) ]
+    , "Source" .= (toUtf8 <$> _hd_source _node_hyperdata)
+    , "Abstract" .= (toUtf8 <$> _hd_abstract _node_hyperdata) ]

 data Ngrams =
   Ngrams { _ng_ngrams :: [Text]
...
@@ -69,7 +69,7 @@ instance ToSchema NewWithFile where
 instance GargDB.SaveFile NewWithFile where
   saveFile' fp (NewWithFile b64d _ _) = do
-    let eDecoded = BSB64.decode $ encodeUtf8 b64d
+    let eDecoded = BSB64.decode $ toUtf8 b64d
     case eDecoded of
       Left err -> panic $ T.pack $ "Error decoding: " <> err
       Right decoded -> BS.writeFile fp decoded
...
@@ -14,15 +14,10 @@ Portability : POSIX
 module Gargantext.API.Types where

 import Data.Aeson
-import qualified Data.ByteString.Lazy.Char8 as BS8
-import Data.Either (Either(..))
-import Data.List.NonEmpty (NonEmpty ((:|)))
-import Data.Text (Text)
-import qualified Data.Text.Encoding as E
+import Data.ByteString.Lazy.Char8 qualified as BS8
 import Data.Typeable
+import Gargantext.Prelude
 import Network.HTTP.Media ((//), (/:))
-import Prelude (($))
-import qualified Prelude
 import Servant
   ( Accept(..)
   , MimeRender(..)
@@ -32,12 +27,12 @@ data HTML deriving (Typeable)
 instance Accept HTML where
   contentTypes _ = "text" // "html" /: ("charset", "utf-8") :| ["text" // "html"]
 instance MimeRender HTML BS8.ByteString where
-  mimeRender _ = Prelude.id
+  mimeRender _ = identity
 instance MimeUnrender HTML BS8.ByteString where
   mimeUnrender _ bs = Right bs
 instance MimeRender HTML Text where
-  mimeRender _ bs = BS8.fromStrict $ E.encodeUtf8 bs
+  mimeRender _ = toUtf8Lazy
 instance MimeUnrender HTML Text where
-  mimeUnrender _ bs = Right $ E.decodeUtf8 $ BS8.toStrict bs
+  mimeUnrender _ bs = Right $ decodeUtf8 $ BS8.toStrict bs
 instance {-# OVERLAPPABLE #-} ToJSON a => MimeRender HTML a where
   mimeRender _ = encode
@@ -82,10 +82,10 @@ convertQuery q = ESearch (interpretQuery q transformAST)
       BFalse
         -> mempty
       BConst (Positive (Term term))
-        -> [QE (TE.encodeUtf8 term)]
+        -> [QE (toUtf8 term)]
       -- We can handle negatives via `ANDNOT` with itself.
       BConst (Negative (Term term))
-        -> [QN "NOT+", QE (TE.encodeUtf8 term)]
+        -> [QN "NOT+", QE (toUtf8 term)]

 get :: Text
     -> Corpus.RawQuery
...
@@ -12,7 +12,6 @@ where
 import qualified Data.Map.Strict as Map
 import qualified Data.Text as T
-import qualified Data.Text.Encoding as TE
 import Gargantext.Utils.Servant (CSV, Markdown)
 import Network.HTTP.Client (newManager, Request(..))
 import Network.HTTP.Client.TLS (tlsManagerSettings)
@@ -119,5 +118,5 @@ codimd (Host host) d = do
     Left (FailureResponse _req (Response { responseHeaders })) ->
       case Map.lookup "location" (Map.fromList $ toList responseHeaders) of
         Nothing -> Left "Cannot find 'Location' header in response"
-        Just loc -> Right $ TE.decodeUtf8 loc
+        Just loc -> Right $ decodeUtf8 loc
   err -> Left $ "Error creating codimd document: " <> show err
@@ -29,7 +29,10 @@ import Prelude qualified
 -- FIXME UserName used twice
 data User = UserDBId UserId | UserName Text | RootId NodeId
-  deriving (Eq)
+  deriving (Show, Eq, Generic)
+
+instance ToJSON User
+instance FromJSON User

 renderUser :: User -> T.Text
 renderUser = \case
...
@@ -14,7 +14,6 @@ import qualified Data.ByteString.Lazy.Char8 as BSC
 import Data.Csv (defaultEncodeOptions, encodeByNameWith, encodeDefaultOrderedByName, header, namedRecord, (.=), DefaultOrdered, EncodeOptions(..), NamedRecord, Quoting(QuoteNone), ToNamedRecord)
 import qualified Data.Map.Strict as Map
 import qualified Data.Text as T
-import qualified Data.Text.Encoding as TE
 import Gargantext.API.Ngrams.Types (mSetToList, NgramsRepoElement(..), NgramsTableMap, NgramsTerm(..), unNgramsTerm)
 import Gargantext.Core.Types.Main (ListType(..))
 import Network.HTTP.Media ((//), (/:))
@@ -32,7 +31,7 @@ instance (DefaultOrdered a, ToNamedRecord a) => MimeRender CSV [a] where
   mimeRender _ = encodeDefaultOrderedByName

 instance MimeRender CSV T.Text where
-  mimeRender _ = BSC.fromStrict . TE.encodeUtf8
+  mimeRender _ = toUtf8Lazy

 -- CSV:
 -- header: status\tlabel\tforms
@@ -72,7 +71,7 @@ instance Accept Markdown where
   contentType _ = "text" // "markdown"

 instance MimeRender Markdown T.Text where
-  mimeRender _ = BSC.fromStrict . TE.encodeUtf8
+  mimeRender _ = toUtf8Lazy

 instance MimeUnrender Markdown T.Text where
-  mimeUnrender _ = Right . TE.decodeUtf8 . BSC.toStrict
+  mimeUnrender _ = Right . decodeUtf8 . BSC.toStrict
@@ -81,7 +81,7 @@ protectedWith extraHeaders tkn mth url payload =
   -- the extra headers will take precedence.
   let defaultHeaders = [ (hAccept, "application/json;charset=utf-8")
                        , (hContentType, "application/json")
-                       , (hAuthorization, "Bearer " <> TE.encodeUtf8 tkn)
+                       , (hAuthorization, "Bearer " <> toUtf8 tkn)
                        ]
       hdrs = Map.toList $ Map.fromList $ defaultHeaders <> extraHeaders
   in request mth url hdrs payload
@@ -149,7 +149,7 @@ tests = sequential $ aroundAll withTestDBAndPort $ do
       withValidLogin port "alice" (GargPassword "alice") $ \token -> do
         let ( roots_api :<|> _nodes_api
-             ) = client (Proxy :: Proxy (MkProtectedAPI GargAdminAPI)) (SA.Token $ TE.encodeUtf8 $ token)
+             ) = client (Proxy :: Proxy (MkProtectedAPI GargAdminAPI)) (SA.Token $ toUtf8 $ token)
         let ( admin_user_api_get :<|> _) = roots_api
         _nodes <- runClientM admin_user_api_get (clientEnv port)
...
@@ -84,7 +84,7 @@ pollUntilFinished :: HasCallStack
 pollUntilFinished tkn port mkUrlPiece = go 60
   where
     go :: Int -> JobPollHandle -> WaiSession () JobPollHandle
-    go 0 h = error $ T.unpack $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
+    go 0 h = error $ T.unpack $ "pollUntilFinished exhausted attempts. Last found JobPollHandle: " <> decodeUtf8 (JSON.encode h)
     go n h = case _jph_status h == "IsPending" || _jph_status h == "IsRunning" of
       True -> do
         liftIO $ threadDelay 1_000_000
@@ -92,7 +92,7 @@ pollUntilFinished tkn port mkUrlPiece = go 60
         go (n-1) h'
       False
         | _jph_status h == "IsFailure"
-          -> error $ T.unpack $ "JobPollHandle contains a failure: " <> T.decodeUtf8 (BL.toStrict $ JSON.encode h)
+          -> error $ T.unpack $ "JobPollHandle contains a failure: " <> decodeUtf8 (JSON.encode h)
         | otherwise
           -> pure h
...
@@ -49,7 +49,7 @@ bootstrapDB tmpDB pool _cfg = Pool.withResource pool $ \conn -> do
   schemaPath <- gargDBSchema
   let connString = Tmp.toConnectionString tmpDB
   (res,ec) <- shelly $ silently $ escaping False $ do
-    result <- SH.run "psql" ["-d", "\"" <> TE.decodeUtf8 connString <> "\"", "<", fromString schemaPath]
+    result <- SH.run "psql" ["-d", "\"" <> decodeUtf8 connString <> "\"", "<", fromString schemaPath]
     (result,) <$> lastExitCode
   unless (ec == 0) $ throwIO (Prelude.userError $ show ec <> ": " <> T.unpack res)
...
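The call sites above follow a handful of recurring shapes: strict encoding for HTTP headers and base64 input, lazy encoding where a lazy ByteString used to be built via BSL.fromStrict, and decoding of responses and JSON output. The hypothetical snippet below (reusing the sketch module from the top of this page) restates two of those shapes. Note that the explicit :: Text annotations added in the firewall hunk suggest the real toUtf8 is overloaded in its argument type, which the monomorphic sketch sidesteps.

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ImportQualifiedPost #-}
-- Hypothetical call sites exercising the sketched helpers; they mirror the
-- header-building and JSON-logging patterns touched by this diff.
module Utf8UsageSketch where

import Data.Aeson qualified as JSON
import Data.ByteString (ByteString)
import Data.ByteString.Lazy qualified as BSL
import Data.Text (Text)
import Utf8HelpersSketch (toUtf8, decodeUtf8)

-- An Authorization header value, as in protectedWith: "Bearer " <> toUtf8 tkn.
bearerHeader :: Text -> ByteString
bearerHeader tkn = "Bearer " <> toUtf8 tkn

-- Render a JSON value as Text for an error message, as in pollUntilFinished.
-- The strict decoder sketched above needs an explicit BSL.toStrict here,
-- whereas the diff applies decodeUtf8 to the lazy ByteString directly.
showJson :: JSON.ToJSON a => a -> Text
showJson = decodeUtf8 . BSL.toStrict . JSON.encode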