Commit 9750af2b authored by Alfredo Di Napoli's avatar Alfredo Di Napoli

Rework Node story fixers in terms of forests and trees

This commit generalises the API previously defined for Ngrams and
NgramsElement so that it also covers Ngrams and NgramsRepoElement,
making it suitable for NodeStory manipulations.

Furthermore, we use zippers to efficiently traverse forests.

This paves the way for more disciplined handling of forests of
ngrams.
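
In type terms, a sketch assembled from the diff below (not additional code in
the patch): the forest builder moves from being tied to 'NgramsElement' to
being parametrised over any element type exposing its children.

    -- Before (Gargantext.API.Ngrams), tied to NgramsElement:
    buildForest :: Map NgramsTerm NgramsElement -> Forest NgramsElement

    -- After (Gargantext.Core.NodeStory), works for both NgramsElement and
    -- NgramsRepoElement via a small type class:
    buildForest :: HasNgramChildren e => Map NgramsTerm e -> Forest (NgramsTerm, e)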
parent 3c2dab6d
Pipeline #7763 failed with stages
in 25 minutes and 13 seconds
......@@ -570,6 +570,7 @@ library
, json-stream ^>= 0.4.2.4
, lens >= 5.2.2 && < 5.3
, lens-aeson < 1.3
, list-zipper
, massiv < 1.1
, matrix ^>= 0.3.6.1
, mime-mail >= 0.5.1
......
......@@ -89,6 +89,8 @@ module Gargantext.API.Ngrams
, PatchHistory(..)
, newNgramsFromNgramsStatePatch
, filterNgramsNodes
-- * Operations on a forest
, buildForest
, destroyForest
, pruneForest
......@@ -107,7 +109,8 @@ import Data.Text.Lazy.IO as DTL ( writeFile )
import Data.Tree
import Gargantext.API.Ngrams.Tools (getNodeStory)
import Gargantext.API.Ngrams.Types
import Gargantext.Core.NodeStory (ArchiveList, HasNodeStory, NgramsStatePatch', a_history, a_state, a_version, currentVersion, NodeStoryEnv, hasNodeArchiveStoryImmediateSaver, hasNodeStoryImmediateSaver, HasNodeStoryEnv (..))
import Gargantext.Core.NodeStory hiding (buildForest)
import Gargantext.Core.NodeStory qualified as NodeStory
import Gargantext.Core.Text.Ngrams (Ngrams, NgramsType)
import Gargantext.Core.Types (ListType(..), NodeId, ListId, TODO, assertValid, ContextId, HasValidationError)
import Gargantext.Core.Types.Query (Limit(..), Offset(..), MinSize(..), MaxSize(..))
......@@ -462,15 +465,12 @@ matchingNode listType minSize maxSize searchQuery inputNode =
&& searchQuery (inputNode ^. ne_ngrams)
&& matchesListType (inputNode ^. ne_list)
-- | Builds an ngrams forest from the input ngrams table map.
-- | Version of 'buildForest' specialised to 'NgramsElement' as the values of the tree.
-- We can't use a single function to \"rule them all\" because the 'NgramsRepoElement' that
-- the 'NodeStory' uses does not have an 'ngrams' field we can use as the key when building
-- and destroying a forest.
buildForest :: Map NgramsTerm NgramsElement -> Forest NgramsElement
buildForest mp = unfoldForest mkTreeNode (Map.toList mp)
where
mkTreeNode :: (NgramsTerm, NgramsElement) -> (NgramsElement, [(NgramsTerm, NgramsElement)])
mkTreeNode (_, el) = (el, mapMaybe findChildren $ mSetToList (_ne_children el))
findChildren :: NgramsTerm -> Maybe (NgramsTerm, NgramsElement)
findChildren t = Map.lookup t mp <&> \el -> (t, el)
buildForest = map (fmap snd) . NodeStory.buildForest
-- | Folds an Ngrams forest back to a table map.
-- This function doesn't aggregate information, but merely reconstructs the original
......@@ -485,22 +485,13 @@ destroyForest f = Map.fromList . map (foldTree destroyTree) $ f
squashElements :: NgramsElement -> [(NgramsTerm, NgramsElement)] -> NgramsElement
squashElements r _ = r
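As a property sketch (hypothetical test code, not part of the patch, assuming every
element's '_ne_ngrams' matches its key in the map), building a forest and folding it
back should reconstruct the original table:

    -- Round-trip sketch: 'destroyForest' does not aggregate information, so it
    -- should undo 'buildForest' on well-formed tables.
    prop_forestRoundTrip :: Map NgramsTerm NgramsElement -> Bool
    prop_forestRoundTrip mp = destroyForest (buildForest mp) == mp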
-- | Prunes the input 'Forest' of 'NgramsElement' by keeping only the roots, i.e. the
-- nodes which have no parent /AND/ do not appear in any other 'children' relationship.
-- /NOTE ON IMPLEMENTATION:/ The fast way to do this is to simply filter each tree, keeping
-- only trees whose root has no parent or root (i.e. it is a root itself!). This works only
-- under the assumption that the input 'Forest' has been built correctly, i.e. with the
-- correct relationships specified, otherwise it will break.
pruneForest :: Forest NgramsElement -> Forest NgramsElement
pruneForest = filter (\(Node r _) -> isNothing (_ne_parent r))
-- | TODO Errors management
-- TODO: polymorphic for Annuaire or Corpus or ...
-- | Table of Ngrams is a ListNgrams formatted (sorted and/or cut).
-- TODO: should take only one ListId
-- | /pure/ function to query a 'Map NgramsTerm NgramsElement', according to a
-- search function. Returns a /versioned/ 'NgramsTable' which is paginated and
-- sorted according to the input 'NgramsSearchQuery', together with the
-- occurrences of the elements.
searchTableNgrams :: Versioned (Map NgramsTerm NgramsElement)
......
......@@ -43,27 +43,31 @@ TODO:
{-# LANGUAGE Arrows #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Gargantext.Core.NodeStory
( module Gargantext.Core.NodeStory.Types
, getNodesArchiveHistory
, Archive(..)
, ArchiveStateForest
, nodeExists
, getNodesIdWithType
, mkNodeStoryEnv
, upsertNodeStories
-- , getNodeStory
, getNodeStory'
, nodeStoriesQuery
, currentVersion
, archiveStateFromList
, archiveStateToList
, fixNodeStoryVersions
, fixChildrenDuplicatedAsParents
, getParentsChildren )
where
import Control.Lens ((%~), non, _Just, at, over)
, getParentsChildren
-- * Operations on trees and forests
, buildForest
, pruneForest
) where
import Control.Lens ((%~), non, _Just, at, over, Lens')
import Data.ListZipper
import Data.Map.Strict qualified as Map
import Data.Set qualified as Set
import Database.PostgreSQL.Simple qualified as PGS
......@@ -77,6 +81,73 @@ import Gargantext.Database.Admin.Types.Node ( ListId, NodeId(..) )
import Gargantext.Database.Admin.Config ()
import Gargantext.Database.Prelude
import Gargantext.Prelude hiding (to)
import Data.Tree
class HasNgramChildren e where
ngramsElementChildren :: Lens' e (MSet NgramsTerm)
instance HasNgramChildren NgramsRepoElement where
ngramsElementChildren = nre_children
instance HasNgramChildren NgramsElement where
ngramsElementChildren = ne_children
class HasNgramParent e where
ngramsElementParent :: Lens' e (Maybe NgramsTerm)
instance HasNgramParent NgramsRepoElement where
ngramsElementParent = nre_parent
instance HasNgramParent NgramsElement where
ngramsElementParent = ne_parent
-- | A 'Forest' (i.e. a list of trees) that models a hierarchy of ngrams terms, possibly grouped in
-- a nested fashion, all wrapped in a 'Zipper'. Why use a 'Zipper'? Because when traversing the
-- forest (for example to fix the children in case of dangling imports) we sometimes need to search
-- through the forest, but crucially we do not want to also search inside the tree we are
-- currently iterating on! A zipper gives exactly that, i.e. a way to \"focus\" only on a particular
-- piece of a data structure.
type ArchiveStateForest = ListZipper (Tree (NgramsTerm, NgramsRepoElement))
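For illustration only (the helper name 'siblingRoots' is hypothetical and not part of
the patch), the zipper makes "the rest of the forest" directly accessible while the
focused tree stays excluded:

    -- Collect the root terms of every tree except the focused one; the
    -- 'ListZipper l focus r' pattern exposes the trees to the left and right
    -- of the focus, which is exactly what 'isOrphan' below needs.
    siblingRoots :: ListZipper (Tree (NgramsTerm, NgramsRepoElement)) -> [NgramsTerm]
    siblingRoots (ListZipper l _focus r) = map (fst . rootLabel) (l <> r)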
buildForestsFromArchiveState :: NgramsState' -> Map Ngrams.NgramsType (Forest (NgramsTerm, NgramsRepoElement))
buildForestsFromArchiveState = Map.map buildForest
destroyArchiveStateForest :: Map Ngrams.NgramsType (Forest (NgramsTerm, NgramsRepoElement)) -> NgramsState'
destroyArchiveStateForest = Map.map destroyForest
-- | Builds an ngrams forest from the input ngrams table map.
buildForest :: forall e. HasNgramChildren e => Map NgramsTerm e -> Forest (NgramsTerm, e)
buildForest mp = unfoldForest mkTreeNode (Map.toList mp)
where
mkTreeNode :: (NgramsTerm, e) -> ((NgramsTerm, e), [(NgramsTerm, e)])
mkTreeNode (k, el) = ((k, el), mapMaybe findChildren $ mSetToList (el ^. ngramsElementChildren))
findChildren :: NgramsTerm -> Maybe (NgramsTerm, e)
findChildren t = Map.lookup t mp <&> \el -> (t, el)
-- | Folds an Ngrams forest back to a table map.
-- This function doesn't aggregate information, but merely reconstructs the original
-- map without loss of information. To perform operations on the forest, use the appropriate
-- functions.
destroyForest :: Forest (NgramsTerm, NgramsRepoElement) -> Map NgramsTerm NgramsRepoElement
destroyForest f = Map.fromList . map (foldTree destroyTree) $ f
where
destroyTree :: (NgramsTerm, NgramsRepoElement)
-> [(NgramsTerm, NgramsRepoElement)]
-> (NgramsTerm, NgramsRepoElement)
destroyTree (k, rootEl) childrenEl = (k, squashElements rootEl childrenEl)
squashElements :: e -> [(NgramsTerm, e)] -> e
squashElements r _ = r
-- | Prunes the input 'Forest' by keeping only the roots, i.e. the
-- nodes which have no parent /AND/ do not appear in any other 'children' relationship.
-- /NOTE ON IMPLEMENTATION:/ The fast way to do this is to simply filter each tree, keeping
-- only trees whose root has no parent or root (i.e. it is a root itself!). This works only
-- under the assumption that the input 'Forest' has been built correctly, i.e. with the
-- correct relationships specified, otherwise it will break.
pruneForest :: HasNgramParent e => Forest e -> Forest e
pruneForest = filter (\(Node r _) -> isNothing (r ^. ngramsElementParent))
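A quick illustration of the pruning (hypothetical bindings, terms borrowed from the
test below): assuming 'rootEl :: NgramsElement' has no parent and 'childEl' has
"new abelian group" as its parent, only the tree rooted at the parentless element
survives.

    -- pruneForest [ Node childEl []
    --             , Node rootEl [ Node childEl [] ] ]
    --   == [ Node rootEl [ Node childEl [] ] ]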
getNodeStory' :: NodeId -> DBQuery err x ArchiveList
......@@ -94,15 +165,6 @@ getNodeStory' nId = do
-- `node_id`, `version` and there is a M2M table
-- `node_stories_ngrams` without the `version` column? Then we would
-- have `version` in only one place.
{-
let versionsS = Set.fromList $ map (\a -> a ^. a_version) dbData
if Set.size versionsS > 1 then
panic $ Text.pack $ "[getNodeStory] versions for " <> show nodeId <> " differ! " <> show versionsS
else
pure ()
-}
pure $ foldl' combine initArchive dbData
where
-- NOTE (<>) for Archive doesn't concatenate states, so we have to use `combine`
......@@ -174,29 +236,18 @@ updateNodeStory nodeId currentArchive newArchive = do
--printDebug "[updateNodeStory] delete applied" ()
updateArchiveStateList nodeId (newArchive ^. a_version) updates
--printDebug "[updateNodeStory] update applied" ()
pure ()
-- where
-- update = Update { uTable = nodeStoryTable
-- , uUpdateWith = updateEasy (\(NodeStoryDB { node_id }) ->
-- NodeStoryDB { archive = sqlValueJSONB $ Archive { _a_history = emptyHistory
-- , ..}
-- , .. })
-- , uWhere = (\row -> node_id row .== sqlInt4 nId)
-- , uReturning = rCount }
upsertNodeStories :: NodeId -> ArchiveList -> DBUpdate err ()
upsertNodeStories nodeId newArchive = do
-- printDebug "[upsertNodeStories] START nId" nId
-- printDebug "[upsertNodeStories] locking nId" nId
-- printDebug "[upsertNodeStories] locking nId" nId
(NodeStory m) <- getNodeStory nodeId
case Map.lookup nodeId m of
Nothing -> do
_ <- insertNodeStory nodeId newArchive
pure ()
Just currentArchive -> do
_ <- updateNodeStory nodeId currentArchive newArchive
pure ()
Nothing ->
void $ insertNodeStory nodeId newArchive
Just currentArchive ->
void $ updateNodeStory nodeId currentArchive newArchive
-- 3. Now we need to set versions of all node state to be the same
updateNodeStoryVersion nodeId newArchive
......@@ -216,6 +267,7 @@ nodeStoryInc ns@(NodeStory nls) nId = do
-- `nre_parent` and `nre_children`. We want to make sure that all
-- children entries (i.e. ones that have `nre_parent`) have the same
-- `list` as their parent entry.
-- NOTE(adn) Currently unused, see !424 for context.
_fixChildrenInNgrams :: NgramsState' -> NgramsState'
_fixChildrenInNgrams ns = archiveStateFromList $ nsParents <> nsChildrenFixed
where
......@@ -233,29 +285,56 @@ _fixChildrenInNgrams ns = archiveStateFromList $ nsParents <> nsChildrenFixed
-- | (#281) Sometimes, when we upload a new list, a child can be left
-- without a parent. Find such ngrams and set their 'root' and
-- 'parent' to 'Nothing'.
_fixChildrenWithNoParent :: NgramsState' -> NgramsState'
_fixChildrenWithNoParent ns = archiveStateFromList $ nsParents <> nsChildrenFixed
fixChildrenWithNoParent :: Map Ngrams.NgramsType (Forest (NgramsTerm, NgramsRepoElement))
-> Map Ngrams.NgramsType (Forest (NgramsTerm, NgramsRepoElement))
fixChildrenWithNoParent = Map.map go
where
(nsParents, nsChildren) = getParentsChildren ns
parentNtMap = Map.fromList $ (\(_nt, t, nre) -> (t, nre ^. nre_children & mSetToSet)) <$> nsParents
nsChildrenFixFunc (nt, t, nre) =
( nt
, t
, nre { _nre_root = root
, _nre_parent = parent }
)
where
(root, parent) = case parentNtMap ^. at (nre ^. nre_parent . _Just) . _Just . at t of
Just _ -> (nre ^. nre_root, nre ^. nre_parent)
Nothing -> (Nothing, Nothing)
nsChildrenFixed = nsChildrenFixFunc <$> nsChildren
-- If the forest is somehow empty, do nothing. Otherwise, build a zipper and run
-- the algorithm.
go :: Forest (NgramsTerm, NgramsRepoElement)
-> Forest (NgramsTerm, NgramsRepoElement)
go fs = case zipper fs of
Nothing -> fs
Just zfs -> maybe mempty toList $ execListZipperOp fixDanglingChildrenInForest zfs
fixDanglingChildrenInForest :: ListZipperOp (Tree (NgramsTerm, NgramsRepoElement)) ()
fixDanglingChildrenInForest = do
z@(ListZipper l a r) <- get
when (isOrphan (l <> r) a) $ void $ modifyFocus detachFromParent
unless (atEnd z) $ do
moveRight
fixDanglingChildrenInForest
detachFromParent :: Tree (NgramsTerm, NgramsRepoElement) -> Tree (NgramsTerm, NgramsRepoElement)
detachFromParent (Node (k,v) el) = Node (k, v & nre_root .~ Nothing & nre_parent .~ Nothing) el
isOrphan :: Forest (NgramsTerm, NgramsRepoElement)
-- ^ The rest of the forest, i.e. the list of trees
-- except the input one.
-> Tree (NgramsTerm, NgramsRepoElement)
-- ^ The tree we are currently focusing.
-> Bool
-- ^ True if the root of the tree refers to
-- a node that is not listed as any children of
-- the subtrees, and yet it has a non-null parent
-- or root.
isOrphan restOfTheForest (Node (k,v) _) =
(isJust (_nre_parent v) || isJust (_nre_root v)) &&
not (isChildInForest (k,v) restOfTheForest)
-- | Returns 'True' if the input child can be found in the tree.
isChildInForest :: Eq e => e -> Forest e -> Bool
isChildInForest _ [] = False
isChildInForest e (x:xs) =
case e == rootLabel x of
True -> True
False -> isChildInForest e (subForest x) || isChildInForest e xs
-- | Sometimes children can also become parents (e.g. #313). Find such
-- children and remove them from the list.
fixChildrenDuplicatedAsParents :: NgramsState' -> NgramsState'
fixChildrenDuplicatedAsParents ns = archiveStateFromList $ nsChildren <> nsParentsFixed
-- NOTE(adn) Currently unused, see !424 for context.
_fixChildrenDuplicatedAsParents :: NgramsState' -> NgramsState'
_fixChildrenDuplicatedAsParents ns = archiveStateFromList $ nsChildren <> nsParentsFixed
where
(nsParents, nsChildren) = getParentsChildren ns
parentNtMap = Map.fromList $ (\(_nt, t, nre) -> (t, nre ^. nre_children & mSetToSet)) <$> nsParents
......@@ -280,30 +359,18 @@ getParentsChildren ns = (nsParents, nsChildren)
mkNodeStoryEnv :: NodeStoryEnv err
mkNodeStoryEnv = do
-- tvar <- nodeStoryVar pool Nothing []
let saver_immediate nId a = do
-- ns <- atomically $
-- readTVar tvar
-- -- fix children so their 'list' is the same as their parents'
-- >>= pure . fixChildrenTermTypes
-- -- fix children that don't have a parent anymore
-- >>= pure . fixChildrenWithNoParent
-- >>= writeTVar tvar
-- >> readTVar tvar
--printDebug "[mkNodeStorySaver] will call writeNodeStories, ns" ns
-- writeNodeStories c $ fixChildrenWithNoParent $ fixChildrenTermTypes ns
-- | NOTE: Fixing a_state is kind of a hack. We shouldn't end up
-- with bad state in the first place.
upsertNodeStories nId $
a & a_state %~ identity
a & a_state %~ (
destroyArchiveStateForest
. fixChildrenWithNoParent
. buildForestsFromArchiveState
)
let archive_saver_immediate nId a = do
insertNodeArchiveHistory nId (a ^. a_version) $ reverse $ a ^. a_history
pure $ a & a_history .~ []
-- mapM_ (\(nId, a) -> do
-- insertNodeArchiveHistory c nId (a ^. a_version) $ reverse $ a ^. a_history
-- ) $ Map.toList nls
-- pure $ clearHistory ns
NodeStoryEnv { _nse_saver = saver_immediate
, _nse_archive_saver = archive_saver_immediate
......@@ -351,72 +418,3 @@ fixNodeStoryVersions = runDBTx $ do
[PGS.Only (Just maxVersion)] -> do
void $ mkPGUpdate updateVerQuery (maxVersion, nId, ngramsType)
_ -> panicTrace "Should get only 1 result!"
-----------------------------------------
-- DEPRECATED
-- nodeStoryVar :: Pool PGS.Connection
-- -> Maybe (TVar NodeListStory)
-- -> [NodeId]
-- -> IO (TVar NodeListStory)
-- nodeStoryVar pool Nothing nIds = do
-- state' <- withResource pool $ \c -> nodeStoryIncrementalRead c Nothing nIds
-- atomically $ newTVar state'
-- nodeStoryVar pool (Just tv) nIds = do
-- nls <- atomically $ readTVar tv
-- nls' <- withResource pool
-- $ \c -> nodeStoryIncrementalRead c (Just nls) nIds
-- _ <- atomically $ writeTVar tv nls'
-- pure tv
-- clearHistory :: NodeListStory -> NodeListStory
-- clearHistory (NodeStory ns) = NodeStory $ ns & (traverse . a_history) .~ emptyHistory
-- where
-- emptyHistory = [] :: [NgramsStatePatch']
-- fixChildrenWithNoParent :: NodeListStory -> NodeListStory
-- fixChildrenWithNoParent (NodeStory nls) =
-- NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenWithNoParentStatePatch)
-- | (nId, a) <- Map.toList nls ]
-- fixChildrenTermTypes :: NodeListStory -> NodeListStory
-- fixChildrenTermTypes (NodeStory nls) =
-- NodeStory $ Map.fromList [ (nId, a & a_state %~ fixChildrenInNgramsStatePatch)
-- | (nId, a) <- Map.toList nls ]
-- nodeStoryIncrementalRead :: PGS.Connection -> Maybe NodeListStory -> [NodeId] -> IO NodeListStory
-- nodeStoryIncrementalRead _ Nothing [] = pure $ NodeStory Map.empty
-- nodeStoryIncrementalRead c Nothing (ni:ns) = do
-- m <- getNodeStory c ni
-- nodeStoryIncrementalRead c (Just m) ns
-- nodeStoryIncrementalRead c (Just nls) ns = foldM (\m n -> nodeStoryInc c m n) nls ns
-- nodeStoryDec :: Pool PGS.Connection -> NodeListStory -> NodeId -> IO NodeListStory
-- nodeStoryDec pool ns@(NodeStory nls) ni = do
-- case Map.lookup ni nls of
-- Nothing -> do
-- _ <- nodeStoryRemove pool ni
-- pure ns
-- Just _ -> do
-- let ns' = Map.filterWithKey (\k _v -> k /= ni) nls
-- _ <- nodeStoryRemove pool ni
-- pure $ NodeStory ns'
------------------------------------
-- writeNodeStories :: PGS.Connection -> NodeListStory -> IO ()
-- writeNodeStories c (NodeStory nls) = do
-- mapM_ (\(nId, a) -> upsertNodeStories c nId a) $ Map.toList nls
-- nodeStoryRemove :: Pool PGS.Connection -> NodeId -> IO Int64
-- nodeStoryRemove pool (NodeId nId) = withResource pool $ \c -> runDelete c delete
-- where
-- delete = Delete { dTable = nodeStoryTable
-- , dWhere = (\row -> node_id row .== sqlInt4 nId)
-- , dReturning = rCount }
......@@ -27,14 +27,13 @@ module Test.API.UpdateList (
, mkNewWithForm
) where
import Control.Lens (mapped, over)
import Control.Monad.Fail (fail)
import Data.Aeson qualified as JSON
import Data.Aeson.QQ
import Data.ByteString.Lazy qualified as BL
import Data.Map.Strict qualified as Map
import Data.Map.Strict.Patch qualified as PM
import Data.Patch.Class
import Data.Set qualified as Set
import Data.Text qualified as T
import Data.Text.IO qualified as TIO
import Fmt
......@@ -65,6 +64,7 @@ import Gargantext.Database.Query.Facet qualified as Facet
import Gargantext.Prelude hiding (get)
import Network.Wai.Handler.Warp qualified as Wai
import Paths_gargantext (getDataFileName)
import Prelude qualified
import Servant.Client.Streaming
import System.FilePath
import Test.API.Prelude (checkEither, newCorpusForUser, newPrivateFolderForUser, alice)
......@@ -77,7 +77,6 @@ import Test.Hspec.Wai.JSON (json)
import Test.Types (JobPollHandle(..))
import Test.Utils (pollUntilWorkFinished, protectedJSON, withValidLogin, isJobFinished)
import Text.Printf (printf)
import qualified Prelude
uploadJSONList :: LogConfig
......@@ -109,6 +108,17 @@ uploadJSONList log_cfg port token cId pathToNgrams clientEnv = do
pure listId
-- | Compares the ngrams returned by the input IO action with the ones provided in
-- the 'ByteString'. Use this function with the 'json' quasi-quoter to pass a
-- nicely-formatted JSON value directly.
checkNgrams :: IO (Either ClientError (VersionedWithCount NgramsTable))
-> BL.ByteString
-> WaiSession () ()
checkNgrams rq expected = liftIO $ do
eng <- rq
case eng of
Left err -> fail (show err)
Right r -> Just r `shouldBe` JSON.decode expected
tests :: Spec
......@@ -232,48 +242,90 @@ tests = sequential $ aroundAll withTestDBAndPort $ beforeAllWith dbEnvSetup $ do
let log_cfg = (test_config testEnv) ^. gc_logging
withApplication app $ do
withValidLogin port "alice" (GargPassword "alice") $ \clientEnv token -> do
-- this term is imported from the .json file
let importedTerm = NgramsTerm "abelian group"
-- this is the new term, under which importedTerm will be grouped
let newTerm = NgramsTerm "new abelian group"
-- The test data has a single term called "abelian group". In this test
-- we will try grouping together "abelian group" and "new abelian group".
listId <- uploadJSONList log_cfg port token cId "test-data/ngrams/simple.json" clientEnv
let checkNgrams expected = do
eng <- liftIO $ runClientM (get_table_ngrams token cId APINgrams.Terms listId 10 Nothing (Just MapTerm) Nothing Nothing Nothing Nothing) clientEnv
case eng of
Left err -> fail (show err)
Right r ->
let real = over mapped (\nt -> ( nt ^. ne_ngrams
, mSetToList $ nt ^. ne_children ))
(r ^. vc_data . _NgramsTable) in
liftIO $ Set.fromList real `shouldBe` Set.fromList expected
-- The #313 error is about importedTerm being duplicated
-- in a specific case
checkNgrams [ (importedTerm, []) ]
let getNgrams = runClientM (get_table_ngrams token cId APINgrams.Terms listId 10 Nothing (Just MapTerm) Nothing Nothing Nothing Nothing) clientEnv
checkNgrams getNgrams [json| {"version":0
,"count":1
,"data":[
{"ngrams":"abelian group"
,"size":2
,"list":"MapTerm"
,"root":null
,"parent":null
,"occurrences":[]
,"children":[]
}
]
}
|]
let nre = NgramsRepoElement 1 MapTerm Nothing Nothing (MSet mempty)
let patch = PM.fromList [
( newTerm
( "new abelian group"
, NgramsReplace { _patch_old = Nothing
, _patch_new = Just nre } )
]
_ <- liftIO $ runClientM (put_table_ngrams token cId APINgrams.Terms listId (Versioned 1 $ NgramsTablePatch $ fst patch)) clientEnv
-- check that new term is added (with no parent)
checkNgrams [ (newTerm, [])
, (importedTerm, []) ]
checkNgrams getNgrams [json| { "version": 1
,"count":2
,"data":[
{"ngrams":"abelian group"
,"size":2
,"list":"MapTerm"
,"root":null
,"parent":null
,"occurrences":[]
,"children":[]
},
{"ngrams":"new abelian group"
,"size":1
,"list":"MapTerm"
,"root":null
,"parent":null
,"occurrences":[]
,"children":[]
}
]
}
|]
-- now patch it so that we have a group
let patchChildren = PM.fromList [
( newTerm
, toNgramsPatch [importedTerm] )
( "new abelian group"
, toNgramsPatch ["abelian group"] )
]
_ <- liftIO $ runClientM (put_table_ngrams token cId APINgrams.Terms listId (Versioned 32 $ NgramsTablePatch $ fst patchChildren)) clientEnv
-- check that new term is parent of old one
checkNgrams [ (newTerm, [importedTerm]) ]
checkNgrams getNgrams [json| {"version": 2
,"count":2
,"data":[
{"ngrams":"abelian group"
,"size":2
,"list":"MapTerm"
,"root": "new abelian group"
,"parent": "new abelian group"
,"occurrences":[]
,"children":[]
},
{"ngrams":"new abelian group"
,"size":1
,"list":"MapTerm"
,"root":null
,"parent":null
,"occurrences":[]
,"children":["abelian group"]
}
]
}
|]
-- finally, upload the list again, the group should be as
-- it was before (the bug in #313 was that "abelian group"
......@@ -281,14 +333,31 @@ tests = sequential $ aroundAll withTestDBAndPort $ beforeAllWith dbEnvSetup $ do
_ <- uploadJSONList log_cfg port token cId "test-data/ngrams/simple.json" clientEnv
-- old (imported) term shouldn't become parentless
-- (#313 error was that we had [newTerm, importedTerm] instead)
-- NOTE: Unfortunately, I'm not able to reproduce this
-- error here, though I tried. Something is missing, maybe
-- some nodestory integration with tests?
checkNgrams [ (newTerm, [importedTerm]) ]
pure ()
-- (#313 error was that we had ["new abelian group", "abelian group"] instead)
-- In essence, this JSON needs to be exactly the same as the previous one,
-- i.e. re-importing the list doesn't change the topology.
checkNgrams getNgrams [json| {"version": 2
,"count":2
,"data":[
{"ngrams":"abelian group"
,"size":2
,"list":"MapTerm"
,"root": "new abelian group"
,"parent": "new abelian group"
,"occurrences":[]
,"children":[]
},
{"ngrams":"new abelian group"
,"size":1
,"list":"MapTerm"
,"root":null
,"parent":null
,"occurrences":[]
,"children":["abelian group"]
}
]
}
|]
describe "POST /api/v1.0/lists/:id/csv/add/form/async (CSV)" $ do
......