Commit 4366cb29 authored by Grégoire Locqueville

Started removing some dead or commented code

Also started adding some entries to `weeder.toml`, but these should be
discussed further.
parent b71620ea
@@ -21,31 +21,6 @@ import CLI.Types
-- | Main | --
--------------
phyloConfig :: FilePath -> PhyloConfig
phyloConfig outdir = PhyloConfig {
corpusPath = "corpus.csv"
, listPath = "list.csv"
, outputPath = outdir
, corpusParser = Tsv {_tsv_limit = 150000}
, listParser = V4
, phyloName = "phylo_profile_test"
, phyloScale = 2
, similarity = WeightedLogJaccard {_wlj_sensibility = 0.5, _wlj_minSharedNgrams = 2}
, seaElevation = Constante {_cons_start = 0.1, _cons_gap = 0.1}
, defaultMode = True
, findAncestors = False
, phyloSynchrony = ByProximityThreshold {_bpt_threshold = 0.5, _bpt_sensibility = 0.0, _bpt_scope = AllBranches, _bpt_strategy = MergeAllGroups}
, phyloQuality = Quality {_qua_granularity = 0.8, _qua_minBranch = 3}
, timeUnit = Year {_year_period = 3, _year_step = 1, _year_matchingFrame = 5}
, clique = MaxClique {_mcl_size = 5, _mcl_threshold = 1.0e-4, _mcl_filter = ByThreshold}
, exportLabel = [ BranchLabel {_branch_labelTagger = MostEmergentTfIdf, _branch_labelSize = 2}
, GroupLabel {_group_labelTagger = MostEmergentInclusive, _group_labelSize = 2}
]
, exportSort = ByHierarchy {_sort_order = Desc}
, exportFilter = [ByBranchSize {_branch_size = 3.0}]
}
phyloProfileCLI :: HasCallStack => IO ()
phyloProfileCLI = do
......
@@ -36,13 +36,12 @@ module Gargantext.API
import Control.Concurrent.Async qualified as Async
import Data.Cache qualified as InMemory
import Data.List (lookup)
import Data.Set qualified as Set
import Data.Text (pack)
import Data.Text.Encoding qualified as TE
import Data.Text.IO (putStrLn)
import Gargantext.API.Admin.Auth.Types (AuthContext)
import Gargantext.API.Admin.EnvTypes (Env, FireWall(..), Mode(..), env_config, env_jwt_settings)
import Gargantext.API.Admin.EnvTypes (Env, Mode(..), env_config, env_jwt_settings)
import Gargantext.API.Admin.Settings (newEnv)
import Gargantext.API.Middleware (logStdoutDevSanitised)
import Gargantext.API.Routes.Named (API)
@@ -57,7 +56,7 @@ import Gargantext.MicroServices.ReverseProxy (microServicesProxyApp)
import Gargantext.Prelude hiding (putStrLn, to)
import Gargantext.System.Logging (withLoggerHoisted)
import Network.HTTP.Types hiding (Query)
import Network.Wai (Middleware, Request, requestHeaders)
import Network.Wai (Middleware)
import Network.Wai.Handler.Warp hiding (defaultSettings)
import Network.Wai.Middleware.Cors
import Network.Wai.Middleware.RequestLogger (logStdout)
@@ -145,35 +144,8 @@ schedulePeriodicActions _env =
]
in foldlM (\ !acc action -> (`mappend` acc) <$> Cron.execSchedule action) [] actions
{-
where
refreshDBViews :: Cron.Schedule ()
refreshDBViews = do
let doRefresh = do
res <- DB.runCmd env (refreshNgramsMaterialized :: Cmd IOException ())
case res of
Left e -> liftIO $ putStrLn $ pack ("Refreshing Ngrams materialized view failed: " <> displayException e)
Right () -> do
_ <- liftIO $ putStrLn $ pack "Refresh Index Database done"
pure ()
Cron.addJob doRefresh "* 2 * * *"
-}
----------------------------------------------------------------------
fireWall :: Applicative f => Request -> FireWall -> f Bool
fireWall req fw = do
let origin = lookup "Origin" (requestHeaders req)
let host = lookup "Host" (requestHeaders req)
if origin == Just (encodeUtf8 "http://localhost:8008")
&& host == Just (encodeUtf8 "localhost:3000")
|| (not $ unFireWall fw)
then pure True
else pure False
makeGargMiddleware :: CORSSettings -> Mode -> IO Middleware
makeGargMiddleware crsSettings mode = do
let corsMiddleware = cors $ \_incomingRq -> Just
@@ -201,12 +173,6 @@ makeGargMiddleware crsSettings mode = do
makeApp :: Env -> IO Application
makeApp env = do
pure $ serveWithContext api cfg (server env)
-- (ekgStore, ekgMid) <- newEkgStore api
-- ekgDir <- (</> "ekg-assets") <$> getDataDir
-- pure $ ekgMid $ serveWithContext apiWithEkg cfg
-- (WithEkg { ekgAPI = ekgServer ekgDir ekgStore
-- , wrappedAPI = server env
-- })
where
cfg :: Servant.Context AuthContext
cfg = env ^. env_jwt_settings
@@ -222,7 +188,3 @@ data WithEkg api mode = WithEkg
{ ekgAPI :: mode :- NamedRoutes EkgAPI
, wrappedAPI :: mode :- NamedRoutes api
} deriving Generic
apiWithEkg :: Proxy (NamedRoutes (WithEkg API))
apiWithEkg = Proxy
@@ -31,11 +31,9 @@ And you have the main viz
module Gargantext.API.Admin.Auth
( auth
, withPolicy
, withPolicyT
, withNamedPolicyT
, forgotPassword
, forgotPasswordAsync
, withAccess
, withNamedAccess
, ForgotPasswordAsyncParams
@@ -52,7 +50,7 @@ import Gargantext.API.Admin.Auth.Types
import Gargantext.API.Admin.EnvTypes (Env)
import Gargantext.API.Auth.PolicyCheck
import Gargantext.API.Errors (BackendInternalError(..), HasAuthenticationError, _AuthenticationError)
import Gargantext.API.Prelude (authenticationError, HasServerError, GargServerC, _ServerError, GargM, IsGargServer)
import Gargantext.API.Prelude (authenticationError, HasServerError, _ServerError, GargM, IsGargServer)
import Gargantext.API.Routes.Named qualified as Named
import Gargantext.API.Worker (serveWorkerAPI)
import Gargantext.Core.Config (HasJWTSettings(..))
@@ -70,7 +68,7 @@ import Gargantext.Database.Schema.Node (NodePoly(_node_id))
import Gargantext.Prelude hiding (Handler, reverse, to)
import Gargantext.Prelude.Crypto.Auth qualified as Auth
import Gargantext.Prelude.Crypto.Pass.User (gargPass)
import Servant (HasServer, ServerT, NamedRoutes, errBody, hoistServer, err404)
import Servant (HasServer, NamedRoutes, errBody, hoistServer, err404)
import Servant.Auth.Server (makeJWT)
import Servant.Server.Generic (AsServerT)
@@ -125,19 +123,6 @@ auth (AuthRequest u p) = do
throwError $ _AuthenticationError # InvalidUsernameOrPassword
Valid to trId uId -> pure $ AuthResponse to trId uId
--type instance BasicAuthCfg = BasicAuthData -> IO (AuthResult AuthenticatedUser)
{-
instance FromBasicAuthData AuthenticatedUser where
fromBasicAuthData authData authCheckFunction = authCheckFunction authData
authCheck :: forall env. env
-> BasicAuthData
-> IO (AuthResult AuthenticatedUser)
authCheck _env (BasicAuthData login password) = pure $
maybe Indefinite Authenticated $ TODO
-}
withAccessM :: ( IsDBCmd env err m )
=> AuthenticatedUser
-> PathId
@@ -154,15 +139,6 @@ withAccessM (AuthenticatedUser nodeId _userId) (PathNodeNode cId docId) m = do
then m
else m -- serverError err401
withAccess :: forall env err m api.
(GargServerC env err m, HasServer api '[]) =>
Proxy api -> Proxy m -> AuthenticatedUser -> PathId ->
ServerT api m -> ServerT api m
withAccess p _ ur id = hoistServer p f
where
f :: forall a. m a -> m a
f = withAccessM ur id
withNamedAccess :: forall env err m routes.
( IsGargServer env err m
, HasServer (NamedRoutes routes) '[]
@@ -205,22 +181,6 @@ withNamedPolicyT :: forall env m routes.
withNamedPolicyT ur checks m mgr =
hoistServer (Proxy @(NamedRoutes routes)) (\n -> withPolicy ur checks n mgr) m
withPolicyT :: forall env m api. (
IsGargServer env BackendInternalError m
, HasServer api '[]
)
=> Proxy api
-> Proxy m
-> AuthenticatedUser
-> BoolExpr AccessCheck
-> ServerT api m
-> AccessPolicyManager
-> ServerT api m
withPolicyT p _ ur checks m0 mgr = hoistServer p f m0
where
f :: forall a. m a -> m a
f m = withPolicy ur checks m mgr
{- | Collaborative Schema
User at his root can create Teams Folder
User can create Team in Teams Folder.
......
@@ -44,9 +44,6 @@ data ExternalAPIs = OpenAlex
instance FromJSON ExternalAPIs
instance ToJSON ExternalAPIs
externalAPIs :: [ExternalAPIs]
externalAPIs = [minBound .. maxBound]
instance Arbitrary ExternalAPIs
where
arbitrary = arbitraryBoundedEnum
......
@@ -42,32 +42,6 @@ import System.IO.Temp (withTempFile)
newtype IniFile = IniFile { _IniFile :: FilePath }
deriving (Show, Eq, IsString)
{- NOT USED YET
import System.Environment (lookupEnv)
reqSetting :: FromHttpApiData a => Text -> IO a
reqSetting name = do
e <- fromMaybe (panic $ "Missing " <> name) <$> lookupEnv (unpack name)
pure $ either (panic $ "Unable to parse " <> name) identity $ parseUrlPiece $ pack e
optSetting :: FromHttpApiData a => Text -> a -> IO a
optSetting name d = do
me <- lookupEnv (unpack name)
case me of
Nothing -> pure d
Just e -> pure $ either (panic $ "Unable to parse " <> name) identity $ parseUrlPiece $ pack e
settingsFromEnvironment :: IO Settings
settingsFromEnvironment =
Settings <$> (encodeUtf8 <$> reqSetting "ALLOWED_ORIGIN")
<*> (encodeUtf8 <$> reqSetting "ALLOWED_HOST")
<*> optSetting "PORT" 3000
<*> (parseLogLevel <$> optSetting "LOG_LEVEL" "warn")
<*> reqSetting "DB_SERVER"
<*> (parseJwk <$> reqSetting "JWT_SECRET")
<*> optSetting "SEND_EMAIL" SendEmailViaAws
-}
-----------------------------------------------------------------------
-- | RepoDir FilePath configuration
type RepoDirFilePath = FilePath
@@ -87,82 +61,13 @@ repoSaverAction repoDir a = do
renameFile fp (repoSnapshot repoDir)
{-
-- The use of mkDebounce makes sure that repoSaverAction is not called too often.
-- If repoSaverAction starts taking more time than the debounceFreq, then the
-- debounceFreq should be increased.
mkRepoSaver :: RepoDirFilePath -> MVar NgramsRepo -> IO (IO ())
mkRepoSaver repoDir repo_var = mkDebounce settings'
where
settings' = defaultDebounceSettings
{ debounceFreq = let n = 6 :: Int in 10^n -- 1 second
, debounceAction = withMVar repo_var (repoSaverAction repoDir)
-- Note that this is not only a `readMVar` but a `takeMVar`:
-- while repoSaverAction is saving, no other change
-- can be made to the MVar.
-- This might not be efficient and could be reconsidered later.
-- However, it enables us to safely perform a *final* save.
-- See `cleanEnv`.
-- Future work:
-- Add a new MVar just for saving.
}
-}
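As background for the debouncing comment above, here is a minimal, self-contained sketch of the same idea with Control.Debounce (assuming the auto-update package, which the commented-out mkRepoSaver appears to use; the function body and message are illustrative only, not part of this codebase):

-- Illustrative sketch only: at most one "save" per second, no matter how
-- often the debounced action is triggered.
import Control.Concurrent (threadDelay)
import Control.Debounce (mkDebounce, defaultDebounceSettings, debounceAction, debounceFreq)

main :: IO ()
main = do
  save <- mkDebounce defaultDebounceSettings
            { debounceFreq   = 1000000                  -- microseconds, i.e. 1 second
            , debounceAction = putStrLn "saving repo snapshot"
            }
  mapM_ (const save) [1 :: Int .. 100]  -- a burst of calls collapses into one action
  threadDelay 2000000                   -- give the debounced action time to fire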
{-
readRepoEnv :: FilePath -> IO RepoEnv
readRepoEnv repoDir = do
-- Does file exist ? :: Bool
_repoDir <- createDirectoryIfMissing True repoDir
repoFile <- doesFileExist (repoSnapshot repoDir)
-- Is file not empty ? :: Bool
repoExists <- if repoFile
then (>0) <$> getFileSize (repoSnapshot repoDir)
else pure False
mlock <- tryLockFile (repoSnapshot repoDir) Exclusive
lock <- maybe (panic "Repo file already locked") pure mlock
mvar <- newMVar =<<
if repoExists
then do
-- e_repo <- eitherDecodeStrict <$> deserialise <$> L.readFile repoSnapshot
repo <- deserialise <$> L.readFile (repoSnapshot repoDir)
-- repo <- either fail pure e_repo
let archive = (repoSnapshot repoDir) <> ".v" <> show (repo ^. r_version)
copyFile (repoSnapshot repoDir) archive
pure repo
else
pure initRepo
-- TODO save in DB here
saver <- mkRepoSaver repoDir mvar
pure $ RepoEnv { _renv_var = mvar, _renv_saver = saver, _renv_lock = lock }
--}
newEnv :: Logger (GargM Env BackendInternalError) -> GargConfig -> D.Dispatcher -> IO Env
newEnv logger config dispatcher = do
!manager_env <- newTlsManager
-- prios <- withLogger () $ \ioLogger -> Jobs.readPrios ioLogger (sf <> ".jobs")
-- let prios' = Jobs.applyPrios prios Jobs.defaultPrios
-- putStrLn ("Overrides: " <> show prios :: Text)
-- putStrLn ("New priorities: " <> show prios' :: Text)
!pool <- newPool $ _gc_database_config config
!nodeStory_env <- fromDBNodeStoryEnv pool
-- secret <- Jobs.genSecret
-- let jobs_settings = (Jobs.defaultJobSettings 1 secret)
-- & Jobs.l_jsJobTimeout .~ (fromIntegral $ config_env ^. hasConfig ^. gc_jobs . jc_js_job_timeout)
-- & Jobs.l_jsIDTimeout .~ (fromIntegral $ config_env ^. hasConfig ^. gc_jobs . jc_js_id_timeout)
!_env_jwt_settings <- jwtSettings (_gc_secrets config)
--_central_exchange <- forkIO $ CE.gServer (_gc_notifications_config config_env)
{- An 'Env' by default doesn't have strict fields, but when constructing one in production
we want to force them to WHNF to avoid accumulating unnecessary thunks.
-}
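A minimal sketch of the idiom that comment describes, assuming nothing beyond the BangPatterns extension; SmallEnv and mkSmallEnv are made-up names, not part of this codebase:

{-# LANGUAGE BangPatterns #-}
-- Illustrative sketch only: the bang pattern forces the bound value to WHNF at
-- construction time, so the lazy record field stores a value rather than a thunk.
data SmallEnv = SmallEnv { se_total :: Int }  -- lazy field, like 'Env'

mkSmallEnv :: [Int] -> IO SmallEnv
mkSmallEnv xs = do
  !total <- pure (sum xs)   -- evaluated here, not when se_total is first read
  pure SmallEnv { se_total = total }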
@@ -179,11 +84,3 @@ newEnv logger config dispatcher = do
newPool :: ConnectInfo -> IO (Pool Connection)
newPool param =
Pool.newPool $ Pool.setNumStripes (Just 1) $ Pool.defaultPoolConfig (connect param) close (60*60) 8
{-
cleanEnv :: (HasConfig env, HasRepo env) => env -> IO ()
cleanEnv env = do
r <- takeMVar (env ^. repoEnv . renv_var)
repoSaverAction (env ^. hasConfig . gc_repofilepath) r
unlockFile (env ^. repoEnv . renv_lock)
--}
@@ -60,59 +60,6 @@ import Gargantext.Prelude
import Gargantext.System.Logging ( logLocM, LogLevel(..) )
import Gargantext.Utils.Jobs.Monad (JobHandle, MonadJobStatus(..))
------------------------------------------------------------------------
{-
data Query = Query { query_query :: Text
, query_node_id :: Int
, query_lang :: Lang
, query_databases :: [DataOrigin]
}
deriving (Eq, Generic)
deriveJSON (unPrefix "query_") 'Query
instance Arbitrary Query where
arbitrary = elements [ Query q n la fs
| q <- ["honeybee* AND collapse"
,"covid 19"
]
, n <- [0..10]
, la <- allLangs
, fs <- take 3 $ repeat allDataOrigins
]
instance ToSchema Query where
declareNamedSchema = genericDeclareNamedSchema (unPrefixSwagger "query_")
-}
------------------------------------------------------------------------
{-
type Api = PostApi
:<|> GetApi
type PostApi = Summary "New Corpus endpoint"
:> ReqBody '[JSON] Query
:> Post '[JSON] CorpusId
type GetApi = Get '[JSON] ApiInfo
-}
-- | TODO manage several apis
-- TODO-ACCESS
-- TODO this is only the POST
{-
api :: (FlowCmdM env err m) => UserId -> Query -> m CorpusId
api uid (Query q _ as) = do
cId <- case head as of
Nothing -> flowCorpusSearchInDatabase (UserDBId uid) EN q
Just API.All -> flowCorpusSearchInDatabase (UserDBId uid) EN q
Just a -> do
docs <- liftBase $ API.get a q (Just 1000)
cId' <- flowCorpus (UserDBId uid) (Left q) (Multi EN) [docs]
pure cId'
pure cId
-}
------------------------------------------------
-- TODO use this route for Client implementation
@@ -123,22 +70,6 @@ deriveJSON (unPrefix "") 'ApiInfo
instance ToSchema ApiInfo
info :: ApiInfo
info = ApiInfo API.externalAPIs
{-
type AddWithFile = Summary "Add with MultipartData to corpus endpoint"
:> "corpus"
:> Capture "corpus_id" CorpusId
:> "add"
:> "file"
:> MultipartForm Mem (MultipartData Mem)
:> QueryParam "fileType" FileType
:> "async"
:> AsyncJobs JobLog '[JSON] () JobLog
-}
------------------------------------------------------------------------
-- TODO WithQuery also has a corpus id
......
@@ -17,13 +17,12 @@ module Gargantext.Core.Text.Corpus.API
, Corpus.Limit(..)
, GetCorpusError(..)
, get
, externalAPIs
) where
import Conduit ( ConduitT, yieldMany )
import Data.Text qualified as T
import EPO.API.Client.Types qualified as EPO
import Gargantext.API.Admin.Orchestrator.Types (ExternalAPIs(..), externalAPIs)
import Gargantext.API.Admin.Orchestrator.Types (ExternalAPIs(..))
import Gargantext.Core (Lang(..), toISO639)
import Gargantext.Core.Text.Corpus.API.Arxiv qualified as Arxiv
import Gargantext.Core.Text.Corpus.API.EPO qualified as EPO
......
@@ -45,8 +45,6 @@ module Gargantext.Database.Action.Flow -- (flowDatabase, ngrams2list)
, getOrMkRootWithCorpus
, TermType(..)
, DataOrigin(..)
, allDataOrigins
, do_api
)
where
@@ -114,11 +112,6 @@ import PUBMED.Types qualified as PUBMED
-- Imports for upgrade function
import Gargantext.Database.Query.Tree.Error ( HasTreeError )
------------------------------------------------------------------------
allDataOrigins :: [DataOrigin]
allDataOrigins = map InternalOrigin API.externalAPIs <> map ExternalOrigin API.externalAPIs
---------------
-- Show instance is not possible because of IO
......
type-class-roots = true
roots = ['^Main\.main$', '^Paths_.*']
roots = [ '^Main\.main$'
, '^Paths_.*'
# I'm keeping definitions whose names start with `test`, in order to
# avoid removing something that might have value, but we should clarify
# what the purpose of each one is, and whether the main source tree is the
# right place for them (rather than, say, in the tests)
, 'CLI.FilterTermsAndCooc.testCorpus'
, 'CLI.FilterTermsAndCooc.testTermList'
]