Commit 91b97fbd authored by Alexandre Delanoë

[CLEAN] readFile homonyms

parent 96f02a40
Pipeline #3127 passed with stage in 92 minutes and 4 seconds
@@ -40,7 +40,7 @@ main = do
   --let q = ["water", "scarcity", "morocco", "shortage","flood"]
   let q = ["gratuit", "gratuité", "culture", "culturel"]
-  eDocs <- CSV.readFile rPath
+  eDocs <- CSV.readCSVFile rPath
   case eDocs of
     Right (h, csvDocs) -> do
       putStrLn $ "Number of documents before:" <> show (V.length csvDocs)
...
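The rename fixes a homonym: the CSV parser exported a readFile that collides with Prelude.readFile, so every unqualified import site had to hide or qualify one of the two names. A minimal, self-contained sketch of that friction (module, file names, and the function body are illustrative, not from the repository):

module HomonymSketch where

-- With the old name, every unqualified import site had to hide the
-- Prelude version explicitly:
import Prelude hiding (readFile)
import qualified Prelude

-- Hypothetical stand-in for the old CSV.readFile:
readFile :: FilePath -> IO (Either String [String])
readFile fp = Right . lines <$> Prelude.readFile fp

main :: IO ()
main = do
  eRows <- readFile "docs.csv"          -- the local homonym
  raw   <- Prelude.readFile "docs.csv"  -- the Prelude original
  print (either (const 0) length eRows, length raw)

After the rename to readCSVFile, both functions can be imported unqualified with no hiding clause.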
@@ -42,7 +42,7 @@ import Gargantext.Core.Types
 import Gargantext.Core.Text.Terms
 import Gargantext.Core.Text.Context
 import Gargantext.Core.Text.Terms.WithList
-import Gargantext.Core.Text.Corpus.Parsers.CSV (readFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
+import Gargantext.Core.Text.Corpus.Parsers.CSV (readCSVFile, csv_title, csv_abstract, csv_publication_year, unIntOrDec, fromMIntOrDec, defaultYear)
 import Gargantext.Core.Text.List.Formats.CSV (csvMapTermList)
 import Gargantext.Core.Text.Terms (terms)
 import Gargantext.Core.Text.Metrics.Count (coocOnContexts, Coocs)
@@ -86,7 +86,7 @@ main = do
   [corpusFile, termListFile, outputFile] <- getArgs
   --corpus :: IO (DM.IntMap [[Text]])
-  eCorpusFile <- readFile corpusFile
+  eCorpusFile <- readCSVFile corpusFile
   case eCorpusFile of
     Right cf -> do
       let corpus = DM.fromListWith (<>)
...
@@ -109,7 +109,7 @@ csvToDocs parser patterns time path =
                      (termsInText patterns $ (csv_title row) <> " " <> (csv_abstract row))
                      Nothing
                      []
-              ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readFile path
+              ) <$> snd <$> either (\err -> panic $ cs $ "CSV error" <> (show err)) identity <$> Csv.readCSVFile path
     Csv' limit -> Vector.toList
       <$> Vector.take limit
       <$> Vector.map (\row -> Document (toPhyloDate (csv'_publication_year row) (csv'_publication_month row) (csv'_publication_day row) time)
...
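The either (\err -> panic …) identity chain in this hunk unwraps the Either returned by Csv.readCSVFile: it aborts on a parse error and keeps only the parsed rows (snd) on success. A standalone approximation of that unwrapping, with error standing in for Gargantext's panic:

import Data.Vector (Vector)
import qualified Data.Vector as V

-- Crash on Left, keep only the parsed rows on Right.
-- `error` approximates panic here.
unwrapRows :: Show e => Either e (header, Vector row) -> Vector row
unwrapRows = snd . either (\err -> error ("CSV error" <> show err)) id

-- e.g. unwrapRows (Right ((), V.fromList [1,2,3])) == V.fromList [1,2,3]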
@@ -76,7 +76,7 @@ fileDownload uId nId = do
   let (HyperdataFile { _hff_name = name'
                      , _hff_path = path }) = node ^. node_hyperdata
-  Contents c <- GargDB.readFile $ unpack path
+  Contents c <- GargDB.readGargFile $ unpack path
   let (mMime, _) = DMT.guessType DMT.defaultmtd False $ unpack name'
       mime = case mMime of
...
@@ -234,7 +234,7 @@ delimiter Comma = fromIntegral $ ord ','
 ------------------------------------------------------------------------
 readCsvOn' :: [CsvDoc -> Text] -> FilePath -> IO (Either Prelude.String [Text])
 readCsvOn' fields fp = do
-  r <- readFile fp
+  r <- readCSVFile fp
   pure $ ( V.toList
          . V.map (\l -> intercalate (pack " ") $ map (\field -> field l) fields)
          . snd ) <$> r
@@ -267,8 +267,8 @@ readByteStringStrict d ff = (readByteStringLazy d ff) . BL.fromStrict
 ------------------------------------------------------------------------
 -- | TODO use readFileLazy
-readFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
-readFile fp = do
+readCSVFile :: FilePath -> IO (Either Prelude.String (Header, Vector CsvDoc))
+readCSVFile fp = do
   result <- fmap (readCsvLazyBS Comma) $ BL.readFile fp
   case result of
     Left _err -> fmap (readCsvLazyBS Tab) $ BL.readFile fp
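This hunk shows the interesting part of readCSVFile: parse the file as comma-separated first, and re-read it as tab-separated only if that fails. The diff is truncated before the success branch, so the sketch below fills it in as an assumption, with a toy parser standing in for readCsvLazyBS (whose real result type is (Header, Vector CsvDoc)):

import qualified Data.ByteString.Lazy as BL

data Delimiter = Comma | Tab

-- Toy stand-in for readCsvLazyBS: succeed only when the delimiter
-- actually occurs in the input.
readCsvLazyBS :: Delimiter -> BL.ByteString -> Either String [BL.ByteString]
readCsvLazyBS d bs
  | BL.elem (delim d) bs = Right (BL.split (delim d) bs)
  | otherwise            = Left "delimiter not found"
  where
    delim Comma = 44  -- ','
    delim Tab   = 9   -- '\t'

-- Sketch of the delimiter fallback.
readCSVFile :: FilePath -> IO (Either String [BL.ByteString])
readCSVFile fp = do
  result <- fmap (readCsvLazyBS Comma) $ BL.readFile fp
  case result of
    Left _err -> fmap (readCsvLazyBS Tab) $ BL.readFile fp
    Right r   -> pure (Right r)  -- assumed success branch, not shown in the diff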
@@ -448,7 +448,7 @@ parseHal' bs = (V.toList . V.map csvHal2doc . snd) <$> readCsvHalLazyBS bs
 ------------------------------------------------------------------------
 parseCsv :: FilePath -> IO (Either Prelude.String [HyperdataDocument])
-parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readFile fp
+parseCsv fp = fmap (V.toList . V.map csv2doc . snd) <$> readCSVFile fp
 {-
 parseCsv' :: BL.ByteString -> Either Prelude.String [HyperdataDocument]
...
@@ -140,13 +140,13 @@ writeFile a = do
 ---
 -- | Example to read a file with Type
-readFile :: ( MonadReader env m
+readGargFile :: ( MonadReader env m
              , HasConfig env
              , MonadBase IO m
              , ReadFile a
              )
           => FilePath -> m a
-readFile fp = do
+readGargFile fp = do
   dataPath <- view $ hasConfig . gc_datafilepath
   liftBase $ readFile' $ toFilePath dataPath fp
...
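readGargFile resolves its argument against the configured data directory (gc_datafilepath) before reading. The real version is polymorphic over a ReadFile class and a HasConfig environment; a simplified sketch of the same pattern, using a plain ReaderT over a hypothetical Config record and made-up paths:

import Control.Monad.Reader (ReaderT, runReaderT, asks)
import Control.Monad.IO.Class (MonadIO, liftIO)
import System.FilePath ((</>))

newtype Config = Config { dataFilePath :: FilePath }

-- Resolve against the configured data directory, then read.
-- (No clash with Prelude.readFile now that the Gargantext name differs.)
readGargFile :: MonadIO m => FilePath -> ReaderT Config m String
readGargFile fp = do
  dataPath <- asks dataFilePath
  liftIO $ readFile (dataPath </> fp)

main :: IO ()
main = runReaderT (readGargFile "corpus.csv" >>= liftIO . putStrLn . take 80)
                  (Config "/tmp/garg-data")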