[phylo] work on day/hour/minute/second

Too many refactorings unfortunately.

Goes together with
https://gitlab.iscpif.fr/gargantext/haskell-gargantext/commits/494-dev-phylo-for-hh-mm-ss
parent 0c8c7901
Pipeline #7810 failed in 24 minutes and 40 seconds
@@ -407,6 +407,7 @@ layoutWithContextNgramsCpt = here.component "layoutWithContextNgrams" cpt
-------------------------------------------------------------
publicationDate :: Document -> String
publicationDate (Document { publication_date: Just date }) = date
publicationDate (Document { publication_year: Nothing }) = ""
publicationDate (Document { publication_year: Just py, publication_month: Nothing }) = U.zeroPad 2 py
publicationDate (Document { publication_year: Just py, publication_month: Just pm, publication_day: Nothing }) = (U.zeroPad 2 py) <> "-" <> (U.zeroPad 2 pm)
...
@@ -16,6 +16,7 @@ import Gargantext.Components.Forest.Tree.Node.Tools as Tools
import Gargantext.Components.PhyloExplorer.API as Phylo
import Gargantext.Components.PhyloExplorer.Config.ConfigForm as PhyloForm
import Gargantext.Components.PhyloExplorer.ConfigFormParser as PhyloHook
import Gargantext.Components.PhyloExplorer.JSON as PhyloJSON
import Gargantext.Components.GraphExplorer.Types (GraphMetric(..), Strength(..), PartitionMethod(..), BridgenessMethod(..))
import Gargantext.Config.REST (AffRESTError)
import Gargantext.Hooks.UseFeatureFlag as Feature
@@ -168,7 +169,7 @@ updatePhyloCpt = here.component "updatePhylo" cpt
, synchrony: 0.5
, quality: 0.8
, exportFilter: 3.0
, timeUnit: Phylo.Year $ Phylo.TimeUnitCriteria
, timeUnit: PhyloJSON.Year $ PhyloJSON.TimeUnitCriteria
{ period: 3
, step: 1
, matchingFrame: 5
...
@@ -15,38 +15,38 @@ import Gargantext.Types as GT
import Gargantext.Types as Types
import Gargantext.Utils.Toestand as T2
type GraphAsyncUpdateParams =
( graphId :: Int
, listId :: Int
, nodes :: Array (Record SigmaxT.Node)
, session :: Session
, termList :: GT.TermList
, version :: CNT.Version
)
graphAsyncUpdate :: Record GraphAsyncUpdateParams -> AffRESTError GAT.Task
graphAsyncUpdate { graphId, listId, nodes, session, termList, version } = do
post session p q
where
p = GR.GraphAPI graphId $ GT.asyncTaskTypePath GT.GraphRecompute
q =
{ listId
, nodes
, termList
, version
}
type GraphAsyncRecomputeParams =
( graphId :: Int
, session :: Session
)
graphAsyncRecompute :: Record GraphAsyncRecomputeParams -> AffRESTError GAT.Task
graphAsyncRecompute { graphId, session } = do
post session p q
where
p = GR.GraphAPI graphId $ GT.asyncTaskTypePath GT.GraphRecompute
q = {}
-- type GraphAsyncUpdateParams =
-- ( graphId :: Int
-- , listId :: Int
-- , nodes :: Array (Record SigmaxT.Node)
-- , session :: Session
-- , termList :: GT.TermList
-- , version :: CNT.Version
-- )
-- graphAsyncUpdate :: Record GraphAsyncUpdateParams -> AffRESTError GAT.Task
-- graphAsyncUpdate { graphId, listId, nodes, session, termList, version } = do
-- post session p q
-- where
-- p = GR.GraphAPI graphId $ GT.asyncTaskTypePath GT.GraphRecompute
-- q =
-- { listId
-- , nodes
-- , termList
-- , version
-- }
-- type GraphAsyncRecomputeParams =
-- ( graphId :: Int
-- , session :: Session
-- )
-- graphAsyncRecompute :: Record GraphAsyncRecomputeParams -> AffRESTError GAT.Task
-- graphAsyncRecompute { graphId, session } = do
-- post session p q
-- where
-- p = GR.GraphAPI graphId $ GT.asyncTaskTypePath GT.GraphRecompute
-- q = {}
type GraphVersions =
( gv_graph :: Maybe Int
...
@@ -16,6 +16,7 @@ import Gargantext.Components.GraphExplorer.Layout (convert, layout, transformGra
import Gargantext.Components.GraphExplorer.Store as GraphStore
import Gargantext.Components.GraphExplorer.Types as GET
import Gargantext.Config.REST (logRESTError)
import Gargantext.Config.Utils (errorHandlerThatShowsOnFrontend)
import Gargantext.Hooks.FirstEffect (useFirstEffect')
import Gargantext.Hooks.Loader (useLoaderEffect)
import Gargantext.Hooks.Session (useSession)
@@ -47,6 +48,7 @@ nodeCpt = R2.hereComponent here "node" hCpt
-- | States
-- |
{ graphVersion
, errors
} <- AppStore.use
session <- useSession
@@ -55,10 +57,6 @@ nodeCpt = R2.hereComponent here "node" hCpt
state <- T.useBox Nothing
cache <- T.useBox (GET.defaultCacheParams :: GET.CacheParams)
-- | Computed
-- |
let errorHandler = logRESTError hp
-- | Hooks
-- |
@@ -67,7 +65,7 @@ nodeCpt = R2.hereComponent here "node" hCpt
R2.loadLocalStorageState R2.graphParamsKey cache
useLoaderEffect
{ errorHandler
{ errorHandler: errorHandlerThatShowsOnFrontend errors
, loader: GraphAPI.getNodes session graphVersion'
, path: graphId
, state
...
module Gargantext.Components.PhyloExplorer.API
( get
, UpdateData(..)
, TimeUnit(..)
, ReflexiveTimeUnit(..)
, TimeUnitCriteria(..)
, Clique(..)
, ReflexiveClique(..)
, CliqueFilter(..)
, toReflexiveTimeUnit
, fromReflexiveTimeUnit
, extractCriteria
, toReflexiveClique
, update
@@ -22,7 +17,7 @@ import Data.Maybe (Maybe(..))
import Data.Newtype (class Newtype)
import Data.Show.Generic (genericShow)
import Gargantext.AsyncTasks as GAT
import Gargantext.Components.PhyloExplorer.JSON (PhyloJSON)
import Gargantext.Components.PhyloExplorer.JSON (PhyloJSON, TimeUnit(..), TimeUnitCriteria(..))
import Gargantext.Components.PhyloExplorer.Types (PhyloSet, parseToPhyloSet)
import Gargantext.Config.REST (AffRESTError)
import Gargantext.Routes (SessionRoute(..))
@@ -87,131 +82,6 @@ instance JSON.WriteForeign UpdateData where
(Proxy :: Proxy "exportFilter")
(Proxy :: Proxy "_sc_exportFilter")
data TimeUnit
= Epoch TimeUnitCriteria
| Year TimeUnitCriteria
| Month TimeUnitCriteria
| Week TimeUnitCriteria
| Day TimeUnitCriteria
derive instance Generic TimeUnit _
derive instance Eq TimeUnit
instance Show TimeUnit where
show = genericShow
instance JSON.ReadForeign TimeUnit where
readImpl = JSONG.untaggedSumRep
instance JSON.WriteForeign TimeUnit where
writeImpl = case _ of
Epoch (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseEpoch) o
Year (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseYear) o
Month (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseMonth) o
Week (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseWeek) o
Day (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseDay) o
where
parseEpoch =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_epoch_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_epoch_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_epoch_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Epoch"
parseYear =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_year_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_year_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_year_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Year"
parseMonth =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_month_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_month_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_month_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Month"
parseWeek =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_week_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_week_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_week_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Week"
parseDay =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_day_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_day_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_day_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Day"
data ReflexiveTimeUnit
= Epoch_
| Year_
| Month_
| Week_
| Day_
derive instance Generic ReflexiveTimeUnit _
derive instance Eq ReflexiveTimeUnit
instance Show ReflexiveTimeUnit where
show = genericShow
instance Read ReflexiveTimeUnit where
read :: String -> Maybe ReflexiveTimeUnit
read = case _ of
"Epoch_" -> Just Epoch_
"Year_" -> Just Year_
"Month_" -> Just Month_
"Week_" -> Just Week_
"Day_" -> Just Day_
_ -> Nothing
newtype TimeUnitCriteria = TimeUnitCriteria
{ period :: Int
, step :: Int
, matchingFrame :: Int
}
derive instance Generic TimeUnitCriteria _
derive instance Eq TimeUnitCriteria
derive instance Newtype TimeUnitCriteria _
instance Show TimeUnitCriteria where
show = genericShow
derive newtype instance JSON.ReadForeign TimeUnitCriteria
data Clique
= FIS
{ support :: Int
@@ -296,26 +166,15 @@ instance Read CliqueFilter where
"ByNeighbours" -> Just ByNeighbours
_ -> Nothing
toReflexiveTimeUnit :: TimeUnit -> ReflexiveTimeUnit
toReflexiveTimeUnit (Epoch _) = Epoch_
toReflexiveTimeUnit (Year _) = Year_
toReflexiveTimeUnit (Month _) = Month_
toReflexiveTimeUnit (Week _) = Week_
toReflexiveTimeUnit (Day _) = Day_
fromReflexiveTimeUnit :: ReflexiveTimeUnit -> TimeUnitCriteria -> TimeUnit
fromReflexiveTimeUnit Epoch_ c = Epoch c
fromReflexiveTimeUnit Year_ c = Year c
fromReflexiveTimeUnit Month_ c = Month c
fromReflexiveTimeUnit Week_ c = Week c
fromReflexiveTimeUnit Day_ c = Day c
extractCriteria :: TimeUnit -> TimeUnitCriteria
extractCriteria (Epoch (o :: TimeUnitCriteria)) = o
extractCriteria (Year (o :: TimeUnitCriteria)) = o
extractCriteria (Month (o :: TimeUnitCriteria)) = o
extractCriteria (Week (o :: TimeUnitCriteria)) = o
extractCriteria (Day (o :: TimeUnitCriteria)) = o
extractCriteria (Hour (o :: TimeUnitCriteria)) = o
extractCriteria (Minute (o :: TimeUnitCriteria)) = o
extractCriteria (Second (o :: TimeUnitCriteria)) = o
toReflexiveClique :: Clique -> ReflexiveClique
toReflexiveClique (FIS _) = FIS_
...
@@ -12,7 +12,8 @@ import Data.Show.Generic (genericShow)
import Effect (Effect)
import Gargantext.Components.Bootstrap as B
import Gargantext.Components.Bootstrap.Types (ButtonVariant(..), ComponentStatus(..), Variant(..))
import Gargantext.Components.PhyloExplorer.API (CliqueFilter(..), ReflexiveClique(..), ReflexiveTimeUnit(..))
import Gargantext.Components.PhyloExplorer.API (CliqueFilter(..), ReflexiveClique(..))
import Gargantext.Components.PhyloExplorer.Types (ReflexiveTimeUnit(..))
import Gargantext.Hooks.FormValidation (VForm, useFormValidation)
import Gargantext.Hooks.FormValidation.Unboxed as FV
import Gargantext.Hooks.StateRecord (useStateRecord)
@@ -195,6 +196,15 @@ component = R.hooksComponent "configForm" cpt
, H.option
{ value: show Day_ }
[ H.text "Day" ]
, H.option
{ value: show Hour_ }
[ H.text "Hour" ]
, H.option
{ value: show Minute_ }
[ H.text "Minute" ]
, H.option
{ value: show Second_ }
[ H.text "Second" ]
]
]
]
...
@@ -9,7 +9,9 @@ import Data.Int as Int
import Data.Maybe (Maybe(..))
import Data.Newtype (unwrap)
import Data.Number as Number
import Gargantext.Components.PhyloExplorer.API (Clique(..), CliqueFilter, ReflexiveClique(..), ReflexiveTimeUnit, TimeUnitCriteria(..), UpdateData(..), extractCriteria, fromReflexiveTimeUnit, toReflexiveTimeUnit)
import Gargantext.Components.PhyloExplorer.API (Clique(..), CliqueFilter, ReflexiveClique(..), UpdateData(..), extractCriteria)
import Gargantext.Components.PhyloExplorer.JSON (TimeUnitCriteria(..))
import Gargantext.Components.PhyloExplorer.Types (ReflexiveTimeUnit, fromReflexiveTimeUnit, toReflexiveTimeUnit)
import Gargantext.Components.PhyloExplorer.Config.ConfigForm (FormData)
import Gargantext.Types (FrontendError(..))
import Gargantext.Utils (getter)
...
@@ -12,6 +12,8 @@ module Gargantext.Components.PhyloExplorer.JSON
, SeaElevation(..)
, PhyloSimilarity(..)
, TimeUnit(..)
, TimeUnitCriteria(..)
, extractTimeUnitCriteria
, ConfigData(..)
, Quality(..)
) where
@@ -21,9 +23,13 @@ import Gargantext.Prelude
import Data.Generic.Rep (class Generic)
import Data.Generic.Rep as GR
import Data.Maybe (Maybe)
import Data.Newtype (class Newtype)
import Data.Show.Generic (genericShow)
import Foreign as F
import Gargantext.Utils.SimpleJSON (untaggedSumRep)
import Record as Record
import Simple.JSON as JSON
import Type.Proxy (Proxy(..))
newtype PhyloJSON = PhyloJSON
{ pd_corpusId :: Int
@@ -335,40 +341,191 @@ instance Show PhyloSimilarity where
instance JSON.ReadForeign PhyloSimilarity where
readImpl f = GR.to <$> untaggedSumRep f
newtype TimeUnitCriteria = TimeUnitCriteria
{ period :: Int
, step :: Int
, matchingFrame :: Int
}
derive instance Generic TimeUnitCriteria _
derive instance Eq TimeUnitCriteria
derive instance Newtype TimeUnitCriteria _
instance Show TimeUnitCriteria where
show = genericShow
-- derive newtype instance JSON.ReadForeign TimeUnitCriteria
data TimeUnit
= Epoch
{ _epoch_period :: Int
, _epoch_step :: Int
, _epoch_matchingFrame :: Int
}
| Year
{ _year_period :: Int
, _year_step :: Int
, _year_matchingFrame :: Int
}
| Month
{ _month_period :: Int
, _month_step :: Int
, _month_matchingFrame :: Int
}
| Week
{ _week_period :: Int
, _week_step :: Int
, _week_matchingFrame :: Int
}
| Day
{ _day_period :: Int
, _day_step :: Int
, _day_matchingFrame :: Int
}
= Epoch TimeUnitCriteria
| Year TimeUnitCriteria
| Month TimeUnitCriteria
| Week TimeUnitCriteria
| Day TimeUnitCriteria
| Hour TimeUnitCriteria
| Minute TimeUnitCriteria
| Second TimeUnitCriteria
derive instance Generic TimeUnit _
derive instance Eq TimeUnit
instance Show TimeUnit where
show = genericShow
-- | TODO Simplify the backend response so that such long parsers aren't necessary
instance JSON.ReadForeign TimeUnit where
readImpl f = GR.to <$> untaggedSumRep f
readImpl f = do
{ tag } <- JSON.readImpl f :: F.F { tag :: String }
case tag of
"Epoch" -> do
{ _epoch_period: period, _epoch_step: step, _epoch_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _epoch_period :: Int, _epoch_step :: Int, _epoch_matchingFrame :: Int }
pure $ Epoch $ TimeUnitCriteria { period, step, matchingFrame }
"Year" -> do
{ _year_period: period, _year_step: step, _year_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _year_period :: Int, _year_step :: Int, _year_matchingFrame :: Int }
pure $ Year $ TimeUnitCriteria { period, step, matchingFrame }
"Month" -> do
{ _month_period: period, _month_step: step, _month_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _month_period :: Int, _month_step :: Int, _month_matchingFrame :: Int }
pure $ Month $ TimeUnitCriteria { period, step, matchingFrame }
"Week" -> do
{ _week_period: period, _week_step: step, _week_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _week_period :: Int, _week_step :: Int, _week_matchingFrame :: Int }
pure $ Week $ TimeUnitCriteria { period, step, matchingFrame }
"Day" -> do
{ _day_period: period, _day_step: step, _day_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _day_period :: Int, _day_step :: Int, _day_matchingFrame :: Int }
pure $ Day $ TimeUnitCriteria { period, step, matchingFrame }
"Hour" -> do
{ _hour_period: period, _hour_step: step, _hour_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _hour_period :: Int, _hour_step :: Int, _hour_matchingFrame :: Int }
pure $ Hour $ TimeUnitCriteria { period, step, matchingFrame }
"Minute" -> do
{ _minute_period: period, _minute_step: step, _minute_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _minute_period :: Int, _minute_step :: Int, _minute_matchingFrame :: Int }
pure $ Minute $ TimeUnitCriteria { period, step, matchingFrame }
"Second" -> do
{ _second_period: period, _second_step: step, _second_matchingFrame: matchingFrame } <- JSON.readImpl f :: F.F { _second_period :: Int, _second_step :: Int, _second_matchingFrame :: Int }
pure $ Second $ TimeUnitCriteria { period, step, matchingFrame }
s -> F.fail $ F.ForeignError $ "Unknown tag " <> s
instance JSON.WriteForeign TimeUnit where
writeImpl = case _ of
Epoch (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseEpoch) o
Year (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseYear) o
Month (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseMonth) o
Week (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseWeek) o
Day (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseDay) o
Hour (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseHour) o
Minute (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseMinute) o
Second (TimeUnitCriteria o) -> (JSON.writeImpl <<< parseSecond) o
where
parseEpoch =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_epoch_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_epoch_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_epoch_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Epoch"
parseYear =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_year_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_year_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_year_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Year"
parseMonth =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_month_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_month_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_month_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Month"
parseWeek =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_week_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_week_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_week_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Week"
parseDay =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_day_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_day_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_day_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Day"
parseHour =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_hour_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_hour_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_hour_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Hour"
parseMinute =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_minute_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_minute_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_minute_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Minute"
parseSecond =
Record.rename
(Proxy :: Proxy "period")
(Proxy :: Proxy "_second_period")
>>> Record.rename
(Proxy :: Proxy "step")
(Proxy :: Proxy "_second_step")
>>> Record.rename
(Proxy :: Proxy "matchingFrame")
(Proxy :: Proxy "_second_matchingFrame")
>>> Record.insert
(Proxy :: Proxy "tag")
"Second"
extractTimeUnitCriteria :: TimeUnit -> TimeUnitCriteria
extractTimeUnitCriteria (Epoch tuc) = tuc
extractTimeUnitCriteria (Year tuc) = tuc
extractTimeUnitCriteria (Month tuc) = tuc
extractTimeUnitCriteria (Week tuc) = tuc
extractTimeUnitCriteria (Day tuc) = tuc
extractTimeUnitCriteria (Hour tuc) = tuc
extractTimeUnitCriteria (Minute tuc) = tuc
extractTimeUnitCriteria (Second tuc) = tuc
data Quality = Quality
{ _qua_granularity :: Number
...
'use strict';
import dayjs from '../../src/external-deps/dayjs.min.js';
////////////////////////////////////////////////////////////////////////////////
/// FIELDS
////////////////////////////////////////////////////////////////////////////////
@@ -54,9 +56,9 @@ function contains(str,arr) {
* @returns {Date}
*/
function addDays(date, days) {
var result = new Date(date);
result.setDate(result.getDate() + days);
return result;
return dayjs(date)
.add(days, 'days')
.toDate();
}
/**
* @name removeDays
@@ -65,9 +67,9 @@ function addDays(date, days) {
* @returns {Date}
*/
function removeDays(date, days) {
var result = new Date(date);
result.setDate(result.getDate() - days);
return result;
return dayjs(date)
.subtract(days, 'days')
.toDate();
}
/**
* @name appendCSS
@@ -1229,9 +1231,15 @@ function drawPhylo(branches, periods, groups, links, aLinks, bLinks, frame) {
/* labels */
var firstDate = Math.min(...groups.map(g => (g.from).getFullYear()))
var yLabels = (periods.map(p => ({y:p.y,from:p.from,to:p.to,label:(p.from).getFullYear()}))).filter(p => p.label >= firstDate);
var xLabels = toXLabels(branches,groups,frame[2]);
let firstDate = Math.min(0, ...groups.map(g => g.from.getFullYear()))
let yLabels = periods
.map(p => ({ y: p.y,
from: p.from,
to: p.to,
label: p.from.getFullYear()
}))
.filter(p => p.label >= firstDate);
let xLabels = toXLabels(branches,groups,frame[2]);
/* weight */
@@ -1305,9 +1313,23 @@ function drawPhylo(branches, periods, groups, links, aLinks, bLinks, frame) {
var linkGen = d3.linkVertical();
var groupLinks = links.map(l => ({source: findGroup(groups, l.from, xScale, yScale), target: findGroup(groups, l.to, xScale, yScale),from: l.from, to: l.to, label: l.label}));
var groupAncestors = aLinks.map(l => ({source: findGroup(groups, l.from, xScale, yScale), target: findGroup(groups, l.to, xScale, yScale),from: l.from, to: l.to, label: l.label}));
var groupLinks = links.map(l => ({
source: findGroup(groups, l.from, xScale, yScale),
target: findGroup(groups, l.to, xScale, yScale),
from: l.from,
to: l.to,
label: l.label
}));
var groupAncestors = aLinks.map(l => ({
source: findGroup(groups, l.from, xScale, yScale),
target: findGroup(groups, l.to, xScale, yScale),
from: l.from,
to: l.to,
label: l.label
}));
console.log('groupLinks', groupLinks, 'groupAncestors', groupAncestors);
panel
.selectAll("path")
@@ -1705,42 +1727,51 @@ function addMarkY(ticks) {
* <Date>
*/
function setYDomain(labels) {
var ts = ["week","month","day","year","epoch"];
let ts = ["second", "minute", "hour", "week", "month", "day", "year", "epoch"];
//console.log(labels)
if (ts.includes(window.timeScale)) {
labels = labels.sort(function(d1,d2){return d1.from - d2.from;})
}
var inf = (labels[0]).from,
sup = (labels[labels.length - 1]).to;
if (window.timeScale == "week") {
inf = addDays(inf,7)
sup = addDays(sup,7)
} else if (window.timeScale == "month") {
inf = removeDays(inf,31)
sup = addDays(sup,31)
} else if (window.timeScale == "day") {
inf = removeDays(inf,1)
sup = addDays(sup,1)
} else if (window.timeScale == "year") {
inf = removeDays(inf,365)
sup = addDays(sup,365)
} else if (window.timeScale == "epoch") {
inf = inf
sup = sup
} else {
inf = new Date((inf.getFullYear() - 1),0,0);
sup = new Date((sup.getFullYear() + 1),0,0);
}
let inf = labels.length && labels[0].from || dayjs('0001-01-01T00:00:00Z').toDate(),
sup = labels.length && labels[labels.length - 1].to || dayjs().toDate();
if (window.timeScale == "second") {
inf = dayjs(inf).subtract(1, 'second').toDate();
sup = dayjs(sup).add(1, 'second').toDate();
} else if (window.timeScale == "minute") {
inf = dayjs(inf).subtract(1, 'minute').toDate();
sup = dayjs(sup).add(1, 'minute').toDate();
} else if (window.timeScale == "hour") {
inf = dayjs(inf).subtract(1, 'hour').toDate();
sup = dayjs(sup).add(1, 'hour').toDate();
} else if (window.timeScale == "week") {
inf = addDays(inf,7)
sup = addDays(sup,7)
} else if (window.timeScale == "month") {
inf = removeDays(inf,31)
sup = addDays(sup,31)
} else if (window.timeScale == "day") {
inf = removeDays(inf,1)
sup = addDays(sup,1)
} else if (window.timeScale == "year") {
inf = removeDays(inf,365)
sup = addDays(sup,365)
} else if (window.timeScale == "epoch") {
inf = inf
sup = sup
} else {
inf = new Date((inf.getFullYear() - 1),0,0);
sup = new Date((sup.getFullYear() + 1),0,0);
}
// inf = new Date((inf - 1),6,0);
// inf = new Date((1950 - 1),6,0);
// sup = new Date((sup + 1),0,0);
return [inf,sup];
}
/**
* @name findGroup
...
@@ -4,7 +4,7 @@ module Gargantext.Components.PhyloExplorer.DetailsTab
import Gargantext.Prelude
import Gargantext.Components.PhyloExplorer.JSON (Cluster(..), TimeUnit(..))
import Gargantext.Components.PhyloExplorer.JSON (Cluster(..), TimeUnit(..), TimeUnitCriteria(..))
import Gargantext.Components.PhyloExplorer.Store as PhyloStore
import Gargantext.Components.PhyloExplorer.Types (PhyloConfig(..), PhyloData(..))
import Gargantext.Utils (nbsp)
@@ -143,33 +143,19 @@ detailsTimeUnit t =
]
where
parseTimeUnit :: TimeUnit -> Array R.Element
parseTimeUnit (Epoch { _epoch_period, _epoch_step, _epoch_matchingFrame }) =
[ detailsParams "Epoch" "Time unit"
, detailsParams _epoch_period "Period"
, detailsParams _epoch_step "Step"
, detailsParams _epoch_matchingFrame "Matching frame"
]
parseTimeUnit (Year { _year_period, _year_step, _year_matchingFrame }) =
[ detailsParams "Year" "Time unit"
, detailsParams _year_period "Period"
, detailsParams _year_step "Step"
, detailsParams _year_matchingFrame "Matching frame"
]
parseTimeUnit (Month { _month_period, _month_step, _month_matchingFrame }) =
[ detailsParams "Month" "Time unit"
, detailsParams _month_period "Period"
, detailsParams _month_step "Step"
, detailsParams _month_matchingFrame "Matching frame"
]
parseTimeUnit (Week { _week_period, _week_step, _week_matchingFrame }) =
[ detailsParams "Week" "Time unit"
, detailsParams _week_period "Period"
, detailsParams _week_step "Step"
, detailsParams _week_matchingFrame "Matching frame"
]
parseTimeUnit (Day { _day_period, _day_step, _day_matchingFrame }) =
[ detailsParams "Day" "Time unit"
, detailsParams _day_period "Period"
, detailsParams _day_step "Step"
, detailsParams _day_matchingFrame "Matching frame"
parseTimeUnit (Epoch tuc) = [ detailsParams "Epoch" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Year tuc) = [ detailsParams "Year" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Month tuc) = [ detailsParams "Month" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Week tuc) = [ detailsParams "Week" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Day tuc) = [ detailsParams "Day" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Hour tuc) = [ detailsParams "Hour" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Minute tuc) = [ detailsParams "Minute" "Time unit" ] <> parseTimeUnitCriteria tuc
parseTimeUnit (Second tuc) = [ detailsParams "Second" "Time unit" ] <> parseTimeUnitCriteria tuc

parseTimeUnitCriteria :: TimeUnitCriteria -> Array R.Element
parseTimeUnitCriteria (TimeUnitCriteria { period, step, matchingFrame }) =
[ detailsParams period "Period"
, detailsParams step "Step"
, detailsParams matchingFrame "Matching frame"
]
'use strict';
import dayjs from '../../src/external-deps/dayjs.min.js';
/**
* @name yearToDate
* @param {string} year
@@ -45,3 +48,16 @@ export function utcStringToDate(str) {
return d;
}
export function dayToDate(day) {
return dayjs(parseInt(day)*24*60*60*1000).toDate();
}
export function hourToDate(day) {
return dayjs(parseInt(day)*60*60*1000).toDate();
}
export function minuteToDate(day) {
return dayjs(parseInt(day)*60*1000).toDate();
}
export function secondToDate(day) {
return dayjs(parseInt(day)*1000).toDate();
}
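// Illustrative sketch, assuming the raw values are integer counts since the Unix
// epoch, as the millisecond multiplications above suggest (example values only):
//   dayToDate("19000")   -> dayjs(19000 * 86400000).toDate() // ~ 2022-01-08
//   hourToDate("24")     -> dayjs(24 * 3600000).toDate()     // 1970-01-02T00:00:00Z
//   secondToDate("3600") -> dayjs(3600 * 1000).toDate()      // 1970-01-01T01:00:00Z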
@@ -22,6 +22,9 @@ module Gargantext.Components.PhyloExplorer.Types
, FrameDoc(..)
, CacheParams(..)
, defaultCacheParams
, ReflexiveTimeUnit(..)
, toReflexiveTimeUnit
, fromReflexiveTimeUnit
) where
import Gargantext.Prelude
@@ -39,11 +42,15 @@ import Data.String as String
import Data.String.Extra (camelCase)
import Data.Tuple as Tuple
import Data.Tuple.Nested ((/\))
import Gargantext.Components.PhyloExplorer.JSON (Cluster, PhyloJSON(..), PhyloSimilarity(..), Quality(..), RawEdge(..), RawObject(..), Synchrony(..), TimeUnit)
import Gargantext.Components.PhyloExplorer.JSON (Cluster, PhyloJSON(..), PhyloSimilarity(..), Quality(..), RawEdge(..), RawObject(..), Synchrony(..), TimeUnit(..), TimeUnitCriteria(..))
import Simple.JSON as JSON
-- @NOTE #219: PureScript Date or stick to JavaScript foreign?
foreign import yearToDate :: String -> Date.Date
foreign import dayToDate :: String -> Date.Date
foreign import hourToDate :: String -> Date.Date
foreign import minuteToDate :: String -> Date.Date
foreign import secondToDate :: String -> Date.Date
foreign import stringToDate :: String -> Date.Date
foreign import utcStringToDate :: String -> Date.Date
@@ -102,12 +109,14 @@ parseToPhyloSet (PhyloJSON o@{ pd_data: Just p, pd_config: Just c }) = Just $ Ph
where
epochTS = p.phyloTimeScale == "epoch"
timeUnit = toReflexiveTimeUnit c.timeUnit
ancestorLinks = parseAncestorLinks p.edges
branchLinks = parseBranchLinks p.edges
branches = parseBranches p.objects
groups = parseGroups epochTS p.objects
groups = parseGroups timeUnit epochTS p.objects
links = parseLinks p.edges
periods = parsePeriods epochTS p.objects
periods = parsePeriods timeUnit epochTS p.objects
parseToPhyloSet _ = Nothing
----------------------------------------------------------------------
@@ -217,15 +226,15 @@ derive instance Eq Period
instance Show Period where
show = genericShow
parsePeriods :: Boolean -> Array RawObject -> Array Period
parsePeriods :: ReflexiveTimeUnit -> Boolean -> Array RawObject -> Array Period
parsePeriods epoch = map parse
parsePeriods tu epoch = map parse
>>> Array.catMaybes
where
parse :: RawObject -> Maybe Period
parse (PeriodToNode o) = Just $ Period
{ from: parseNodeDate o.strFrom o.from epoch
{ from: parseNodeDate tu o.strFrom o.from epoch
, to: parseNodeDate o.strTo o.to epoch
, to: parseNodeDate tu o.strTo o.to epoch
, y: Tuple.snd $ parsePos o.pos
}
parse _ = Nothing
@@ -252,8 +261,8 @@ derive instance Eq Group
instance Show Group where
show = genericShow
parseGroups :: Boolean -> Array RawObject -> Array Group
parseGroups :: ReflexiveTimeUnit -> Boolean -> Array RawObject -> Array Group
parseGroups epoch = map parse
parseGroups tu epoch = map parse
>>> Array.catMaybes
where
@@ -261,13 +270,13 @@ parseGroups epoch = map parse
parse (GroupToNode o) = Just $ Group
{ bId: parseInt o.bId
, foundation: stringedArrayToArray' o.foundation
, from: parseNodeDate o.strFrom o.from epoch
, from: parseNodeDate tu o.strFrom o.from epoch
, gId: o._gvid
, label: stringedArrayToArray o.lbl
, role: stringedArrayToArray_ o.role
, size: parseInt o.support
, source: parseSources o.source
, to: parseNodeDate o.strTo o.to epoch
, to: parseNodeDate tu o.strTo o.to epoch
, weight: stringedMaybeToNumber o.weight
, x: Tuple.fst $ parsePos o.pos
, y: Tuple.snd $ parsePos o.pos
@@ -457,13 +466,17 @@ parseBB :: String -> Array Number
parseBB = String.split (String.Pattern ",")
>>> map parseFloat
parseNodeDate :: Maybe String -> String -> Boolean -> Date.Date
parseNodeDate :: ReflexiveTimeUnit -> Maybe String -> String -> Boolean -> Date.Date
-- parseNodeDate Nothing year _ = yearToDate(year)
-- parseNodeDate (Just str) _ true = utcStringToDate(str)
-- parseNodeDate (Just str) _ false = stringToDate(str)
-- @NOTE #219 ^ as soon as the issue regarding `Date` (< 1970) is resolved
-- please uncomment above lines + delete below one
parseNodeDate _ y _ = yearToDate (y)
parseNodeDate Day_ _ d _ = dayToDate (d)
parseNodeDate Hour_ _ d _ = hourToDate (d)
parseNodeDate Minute_ _ d _ = minuteToDate (d)
parseNodeDate Second_ _ d _ = secondToDate (d)
parseNodeDate _ _ y _ = yearToDate (y)
parsePos :: String -> Tuple.Tuple Number Number
parsePos = String.split (String.Pattern ",")
@@ -597,3 +610,52 @@ defaultCacheParams = CacheParams
{ expandSelection: true
, expandNeighborhood: true
}
data ReflexiveTimeUnit
= Epoch_
| Year_
| Month_
| Week_
| Day_
| Hour_
| Minute_
| Second_
derive instance Generic ReflexiveTimeUnit _
derive instance Eq ReflexiveTimeUnit
instance Show ReflexiveTimeUnit where
show = genericShow
instance Read ReflexiveTimeUnit where
read :: String -> Maybe ReflexiveTimeUnit
read = case _ of
"Epoch_" -> Just Epoch_
"Year_" -> Just Year_
"Month_" -> Just Month_
"Week_" -> Just Week_
"Day_" -> Just Day_
"Hour_" -> Just Hour_
"Minute_" -> Just Minute_
"Second_" -> Just Second_
_ -> Nothing
toReflexiveTimeUnit :: TimeUnit -> ReflexiveTimeUnit
toReflexiveTimeUnit (Epoch _) = Epoch_
toReflexiveTimeUnit (Year _) = Year_
toReflexiveTimeUnit (Month _) = Month_
toReflexiveTimeUnit (Week _) = Week_
toReflexiveTimeUnit (Day _) = Day_
toReflexiveTimeUnit (Hour _) = Hour_
toReflexiveTimeUnit (Minute _) = Minute_
toReflexiveTimeUnit (Second _) = Second_
fromReflexiveTimeUnit :: ReflexiveTimeUnit -> TimeUnitCriteria -> TimeUnit
fromReflexiveTimeUnit Epoch_ c = Epoch c
fromReflexiveTimeUnit Year_ c = Year c
fromReflexiveTimeUnit Month_ c = Month c
fromReflexiveTimeUnit Week_ c = Week c
fromReflexiveTimeUnit Day_ c = Day c
fromReflexiveTimeUnit Hour_ c = Hour c
fromReflexiveTimeUnit Minute_ c = Minute c
fromReflexiveTimeUnit Second_ c = Second c
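-- Illustrative sketch (hypothetical values) of how the two helpers above relate:
--   toReflexiveTimeUnit (Hour (TimeUnitCriteria { period: 3, step: 1, matchingFrame: 5 })) == Hour_
--   fromReflexiveTimeUnit Hour_ (TimeUnitCriteria { period: 3, step: 1, matchingFrame: 5 })
--     == Hour (TimeUnitCriteria { period: 3, step: 1, matchingFrame: 5 })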
@@ -17,6 +17,10 @@ import Toestand as T
here :: R2.Here
here = R2.here "Gargantext.Config.Utils"
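-- | Prepend the given RESTError to the application-wide error box so that it is
-- | shown on the frontend.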
errorHandlerThatShowsOnFrontend :: T.Box (Array FrontendError) -> RESTError -> Effect Unit
errorHandlerThatShowsOnFrontend errors error = do
T.modify_ (A.cons $ FRESTError { error }) errors
handleRESTError
:: forall a
. R2.HerePrefix
@@ -25,7 +29,7 @@ handleRESTError
-> (a -> Aff Unit)
-> Aff Unit
handleRESTError herePrefix errors (Left error) _ = liftEffect $ do
T.modify_ (A.cons $ FRESTError { error }) errors
errorHandlerThatShowsOnFrontend errors error
logRESTError herePrefix error
-- here.warn2 "[handleTaskError] RESTError" error
handleRESTError _ _ (Right task) handler = handler task
...
@@ -12,7 +12,7 @@ import Effect.Exception (error)
import Gargantext.Components.App.Store as Store
import Gargantext.Components.LoadingSpinner (loadingSpinner)
import Gargantext.Config.REST (RESTError, AffRESTError, logRESTError)
import Gargantext.Config.Utils (handleRESTError)
import Gargantext.Config.Utils (errorHandlerThatShowsOnFrontend, handleRESTError)
import Gargantext.Prelude
import Gargantext.Types (FrontendError(..))
import Gargantext.Utils.CacheAPI as GUC
@@ -55,18 +55,16 @@ useLoader
{ errors } <- Store.use
useLoader'
{ errorHandler: errorHandler' errors
{ errorHandler: \error -> do
errorHandlerThatShowsOnFrontend errors error
-- default error handler
case errorHandler of
Nothing -> logRESTError herePrefix error
Just eh -> eh error
, loader
, path
, render
}
where
errorHandler' errors error = do
T.modify_ (A.cons $ FRESTError { error }) errors
-- default error handler
case errorHandler of
Nothing -> logRESTError herePrefix error
Just eh -> eh error
-- | Version that doesn't use boxes for errors, prefer unticked one
type UseLoader' path state =
...
!function(t,e){"object"==typeof exports&&"undefined"!=typeof module?module.exports=e():"function"==typeof define&&define.amd?define(e):(t="undefined"!=typeof globalThis?globalThis:t||self).dayjs=e()}(this,(function(){"use strict";var t=1e3,e=6e4,n=36e5,r="millisecond",i="second",s="minute",u="hour",a="day",o="week",c="month",f="quarter",h="year",d="date",l="Invalid Date",$=/^(\d{4})[-/]?(\d{1,2})?[-/]?(\d{0,2})[Tt\s]*(\d{1,2})?:?(\d{1,2})?:?(\d{1,2})?[.:]?(\d+)?$/,y=/\[([^\]]+)]|Y{1,4}|M{1,4}|D{1,2}|d{1,4}|H{1,2}|h{1,2}|a|A|m{1,2}|s{1,2}|Z{1,2}|SSS/g,M={name:"en",weekdays:"Sunday_Monday_Tuesday_Wednesday_Thursday_Friday_Saturday".split("_"),months:"January_February_March_April_May_June_July_August_September_October_November_December".split("_"),ordinal:function(t){var e=["th","st","nd","rd"],n=t%100;return"["+t+(e[(n-20)%10]||e[n]||e[0])+"]"}},m=function(t,e,n){var r=String(t);return!r||r.length>=e?t:""+Array(e+1-r.length).join(n)+t},v={s:m,z:function(t){var e=-t.utcOffset(),n=Math.abs(e),r=Math.floor(n/60),i=n%60;return(e<=0?"+":"-")+m(r,2,"0")+":"+m(i,2,"0")},m:function t(e,n){if(e.date()<n.date())return-t(n,e);var r=12*(n.year()-e.year())+(n.month()-e.month()),i=e.clone().add(r,c),s=n-i<0,u=e.clone().add(r+(s?-1:1),c);return+(-(r+(n-i)/(s?i-u:u-i))||0)},a:function(t){return t<0?Math.ceil(t)||0:Math.floor(t)},p:function(t){return{M:c,y:h,w:o,d:a,D:d,h:u,m:s,s:i,ms:r,Q:f}[t]||String(t||"").toLowerCase().replace(/s$/,"")},u:function(t){return void 0===t}},g="en",D={};D[g]=M;var p="$isDayjsObject",S=function(t){return t instanceof _||!(!t||!t[p])},w=function t(e,n,r){var i;if(!e)return g;if("string"==typeof e){var s=e.toLowerCase();D[s]&&(i=s),n&&(D[s]=n,i=s);var u=e.split("-");if(!i&&u.length>1)return t(u[0])}else{var a=e.name;D[a]=e,i=a}return!r&&i&&(g=i),i||!r&&g},O=function(t,e){if(S(t))return t.clone();var n="object"==typeof e?e:{};return n.date=t,n.args=arguments,new _(n)},b=v;b.l=w,b.i=S,b.w=function(t,e){return O(t,{locale:e.$L,utc:e.$u,x:e.$x,$offset:e.$offset})};var _=function(){function M(t){this.$L=w(t.locale,null,!0),this.parse(t),this.$x=this.$x||t.x||{},this[p]=!0}var m=M.prototype;return m.parse=function(t){this.$d=function(t){var e=t.date,n=t.utc;if(null===e)return new Date(NaN);if(b.u(e))return new Date;if(e instanceof Date)return new Date(e);if("string"==typeof e&&!/Z$/i.test(e)){var r=e.match($);if(r){var i=r[2]-1||0,s=(r[7]||"0").substring(0,3);return n?new Date(Date.UTC(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)):new Date(r[1],i,r[3]||1,r[4]||0,r[5]||0,r[6]||0,s)}}return new Date(e)}(t),this.init()},m.init=function(){var t=this.$d;this.$y=t.getFullYear(),this.$M=t.getMonth(),this.$D=t.getDate(),this.$W=t.getDay(),this.$H=t.getHours(),this.$m=t.getMinutes(),this.$s=t.getSeconds(),this.$ms=t.getMilliseconds()},m.$utils=function(){return b},m.isValid=function(){return!(this.$d.toString()===l)},m.isSame=function(t,e){var n=O(t);return this.startOf(e)<=n&&n<=this.endOf(e)},m.isAfter=function(t,e){return O(t)<this.startOf(e)},m.isBefore=function(t,e){return this.endOf(e)<O(t)},m.$g=function(t,e,n){return b.u(t)?this[e]:this.set(n,t)},m.unix=function(){return Math.floor(this.valueOf()/1e3)},m.valueOf=function(){return this.$d.getTime()},m.startOf=function(t,e){var n=this,r=!!b.u(e)||e,f=b.p(t),l=function(t,e){var i=b.w(n.$u?Date.UTC(n.$y,e,t):new Date(n.$y,e,t),n);return r?i:i.endOf(a)},$=function(t,e){return b.w(n.toDate()[t].apply(n.toDate("s"),(r?[0,0,0,0]:[23,59,59,999]).slice(e)),n)},y=this.$W,M=this.$M,m=this.$D,v="set"+(this.$u?"UTC":"");switch(f){case h:return 
r?l(1,0):l(31,11);case c:return r?l(1,M):l(0,M+1);case o:var g=this.$locale().weekStart||0,D=(y<g?y+7:y)-g;return l(r?m-D:m+(6-D),M);case a:case d:return $(v+"Hours",0);case u:return $(v+"Minutes",1);case s:return $(v+"Seconds",2);case i:return $(v+"Milliseconds",3);default:return this.clone()}},m.endOf=function(t){return this.startOf(t,!1)},m.$set=function(t,e){var n,o=b.p(t),f="set"+(this.$u?"UTC":""),l=(n={},n[a]=f+"Date",n[d]=f+"Date",n[c]=f+"Month",n[h]=f+"FullYear",n[u]=f+"Hours",n[s]=f+"Minutes",n[i]=f+"Seconds",n[r]=f+"Milliseconds",n)[o],$=o===a?this.$D+(e-this.$W):e;if(o===c||o===h){var y=this.clone().set(d,1);y.$d[l]($),y.init(),this.$d=y.set(d,Math.min(this.$D,y.daysInMonth())).$d}else l&&this.$d[l]($);return this.init(),this},m.set=function(t,e){return this.clone().$set(t,e)},m.get=function(t){return this[b.p(t)]()},m.add=function(r,f){var d,l=this;r=Number(r);var $=b.p(f),y=function(t){var e=O(l);return b.w(e.date(e.date()+Math.round(t*r)),l)};if($===c)return this.set(c,this.$M+r);if($===h)return this.set(h,this.$y+r);if($===a)return y(1);if($===o)return y(7);var M=(d={},d[s]=e,d[u]=n,d[i]=t,d)[$]||1,m=this.$d.getTime()+r*M;return b.w(m,this)},m.subtract=function(t,e){return this.add(-1*t,e)},m.format=function(t){var e=this,n=this.$locale();if(!this.isValid())return n.invalidDate||l;var r=t||"YYYY-MM-DDTHH:mm:ssZ",i=b.z(this),s=this.$H,u=this.$m,a=this.$M,o=n.weekdays,c=n.months,f=n.meridiem,h=function(t,n,i,s){return t&&(t[n]||t(e,r))||i[n].slice(0,s)},d=function(t){return b.s(s%12||12,t,"0")},$=f||function(t,e,n){var r=t<12?"AM":"PM";return n?r.toLowerCase():r};return r.replace(y,(function(t,r){return r||function(t){switch(t){case"YY":return String(e.$y).slice(-2);case"YYYY":return b.s(e.$y,4,"0");case"M":return a+1;case"MM":return b.s(a+1,2,"0");case"MMM":return h(n.monthsShort,a,c,3);case"MMMM":return h(c,a);case"D":return e.$D;case"DD":return b.s(e.$D,2,"0");case"d":return String(e.$W);case"dd":return h(n.weekdaysMin,e.$W,o,2);case"ddd":return h(n.weekdaysShort,e.$W,o,3);case"dddd":return o[e.$W];case"H":return String(s);case"HH":return b.s(s,2,"0");case"h":return d(1);case"hh":return d(2);case"a":return $(s,u,!0);case"A":return $(s,u,!1);case"m":return String(u);case"mm":return b.s(u,2,"0");case"s":return String(e.$s);case"ss":return b.s(e.$s,2,"0");case"SSS":return b.s(e.$ms,3,"0");case"Z":return i}return null}(t)||i.replace(":","")}))},m.utcOffset=function(){return 15*-Math.round(this.$d.getTimezoneOffset()/15)},m.diff=function(r,d,l){var $,y=this,M=b.p(d),m=O(r),v=(m.utcOffset()-this.utcOffset())*e,g=this-m,D=function(){return b.m(y,m)};switch(M){case h:$=D()/12;break;case c:$=D();break;case f:$=D()/3;break;case o:$=(g-v)/6048e5;break;case a:$=(g-v)/864e5;break;case u:$=g/n;break;case s:$=g/e;break;case i:$=g/t;break;default:$=g}return l?$:b.a($)},m.daysInMonth=function(){return this.endOf(c).$D},m.$locale=function(){return D[this.$L]},m.locale=function(t,e){if(!t)return this.$L;var n=this.clone(),r=w(t,e,!0);return r&&(n.$L=r),n},m.clone=function(){return b.w(this.$d,this)},m.toDate=function(){return new Date(this.valueOf())},m.toJSON=function(){return this.isValid()?this.toISOString():null},m.toISOString=function(){return this.$d.toISOString()},m.toString=function(){return this.$d.toUTCString()},M}(),k=_.prototype;return O.prototype=k,[["$ms",r],["$s",i],["$m",s],["$H",u],["$W",a],["$M",c],["$y",h],["$D",d]].forEach((function(t){k[t[1]]=function(e){return this.$g(e,t[0],t[1])}})),O.extend=function(t,e){return 
t.$i||(t(e,_,O),t.$i=!0),O},O.locale=w,O.isDayjs=S,O.unix=function(t){return O(1e3*t)},O.en=D[g],O.Ls=D,O.p={},O}));
\ No newline at end of file