{-|
Module      : Gargantext.Database.Bashql
Description : BASHQL to deal with Gargantext Database.
Copyright   : (c) CNRS, 2017-Present
License     : AGPL + CECILL v3
Maintainer  : team@gargantext.org
Stability   : experimental
Portability : POSIX

* BASHQL is a Domain Specific Language to deal with the Database

* BASHQL = functional (Bash * SQL)

* Which language to choose when working with a database? To make it
simple: instead of the common Object Relational Mapping (ORM) [1]
strategies used nowadays, which are inspired more by object logic than
by functional logic, the semantics of BASHQL put the function first.

* BASHQL focuses on functions: it uses the Bash function names and
implements them with SQL behind the scenes. BASHQL is thus inspired
more by the Bash language [2] than by SQL, and follows Bash's main
commands as its specification and documentation.

* Main arguments:
  1. Theoretical: databases and file systems can each be thought of as
  a single category, an assumption based on theoretical work on
  databases by David Spivak [0].
  2. Practical: basic bash commands are a daily practice in the
  developer community.

* How to help?
  1. Choose a command you like in Bash
  2. Implement it in Haskell-SQL according to the Gargantext Schema
  (a tree-like filesystem)
  3. Translate it into BASHQL (follow the previous implementations)
  4. Make a pull request    (enjoy the community)

* Implementation strategy: functional adaptations are made to fit the
Gargantext language options, and SQL optimizations are done continuously
throughout the project. For the Haskell part, you may take inspiration
from the Turtle library written by Gabriel Gonzalez [3], which shows how
to translate Bash into Haskell.

* Semantics
- FileSystem is now a NodeSystem where each File is a Node in a Directed Graph (DG).
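
A hypothetical session (a sketch only, assuming a valid
"gargantext.ini"; every function used here is defined below):

> c   <- connectGargandb "gargantext.ini"
> pwd <- home c     -- the root nodes play the role of $HOME
> ns  <- ls c pwd   -- list the children of the current node
> ts  <- tree c pwd -- the children together with their own children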

* References

[0] David I. Spivak, "Category Theory for the Sciences", MIT Press, 2014.

[1] https://en.wikipedia.org/wiki/Object-relational_mapping

[2] https://en.wikipedia.org/wiki/Bash_(Unix_shell)

[3] https://github.com/Gabriel439/Haskell-Turtle-Library

-}

{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE FlexibleContexts  #-}

module Gargantext.Database.Bashql ( get, get'
                                  , ls  , ls'
                                  , home, home'
                                  , post, post'
                                  , del , del'
                                  , tree, tree'
                                  , postCorpus, postAnnuaire
                                 )
    where

import Control.Monad.Reader -- (Reader, ask)

import Database.PostgreSQL.Simple (Connection)
import Data.Text (Text, pack)
import Data.Aeson
import Data.List (last, concat)

import Gargantext.Core.Types
import Gargantext.Database.Utils (connectGargandb)
import Gargantext.Database.Node
import Gargantext.Prelude

import Opaleye hiding (FromField)
--type UserId = Int
--type NodeId = Int

-- List of NodeId
-- type PWD a = PWD UserId [a]
type PWD = [NodeId]
--data PWD' a = a | PWD' [a]
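-- A PWD works like a shell working directory: the last NodeId of the
-- list is the current node. For instance (with hypothetical ids),
-- get c [rootId, corpusId] lists the children of corpusId.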

-- | TODO get Children or Node
get :: Connection -> PWD -> IO [Node Value]
get _ [] = pure []
get conn pwd = runQuery conn $ selectNodesWithParentID (last pwd)

-- | Home, need to filter with UserId
home :: Connection -> IO PWD
home c = map node_id <$> getNodesWithParentId c 0 Nothing

-- | ls == get Children
ls :: Connection -> PWD -> IO [Node Value]
ls = get


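-- | tree returns the nodes under the given path together with one
-- level of children (unlike the bash command, it is not recursive yet).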
tree :: Connection -> PWD -> IO [Node Value]
tree c p = do
  ns       <- get c p
  children <- mapM (\p' -> get c [p']) $ map node_id ns
  pure $ ns <> concat children


-- | TODO: post inserts the given nodes as children of the last node in
-- the path.
post :: Connection -> PWD -> [NodeWrite'] -> IO Int64
post _ [] _   = pure 0
post _ _ []   = pure 0
post c pth ns = mkNode c (last pth) ns

--postR :: Connection -> PWD -> [NodeWrite'] -> IO [Int]
--postR _ [] _   = pure [0]
--postR _ _ []   = pure [0]
--postR c pth ns = mkNodeR c (last pth) ns
--

--rm :: Connection -> PWD -> [NodeId] -> IO Int
--rm = del

del :: Connection -> [NodeId] -> IO Int
del _ [] = pure 0
del c ns = deleteNodes c ns

-- | TODO
--put :: Connection -> PWD -> [a] -> IO Int64
--put = undefined

-- | TODO
-- cd (Home UserId) | (Node NodeId)
-- cd Path
-- jump NodeId
-- touch Dir
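
-- A possible shape for cd and jump, kept as a commented sketch (the
-- names and semantics are assumptions, consistent with the PWD type
-- above): cd appends a child to the path, jump replaces the path.
--cd :: PWD -> NodeId -> PWD
--cd pwd n = pwd <> [n]
--
--jump :: NodeId -> PWD
--jump n = [n]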

--------------------------------------------------------------
-- Tests
--------------------------------------------------------------
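-- The primed functions below are GHCi conveniences: each one opens its
-- own connection with connectGargandb, except get', which threads the
-- Connection through a Reader instead.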


get' :: PWD -> Reader Connection (IO [Node Value])
get' []  = pure $ pure []
get' pwd = do
  connection <- ask
  pure $ runQuery connection $ selectNodesWithParentID (last pwd)
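-- e.g. (a sketch): ns <- runReader (get' pwd) conn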

home' :: IO PWD
home' = do
  c <- connectGargandb "gargantext.ini"
  home c

--home'' :: Reader Connection (IO PWD)
--home'' = do
--  c <- ask
--  liftIO $ home c


ls' :: IO [Node Value]
ls' = do
  c <- connectGargandb "gargantext.ini"
  h <- home c
  ls c h

tree' :: IO [Node Value]
tree' = do
  c <- connectGargandb "gargantext.ini"
  h <- home c
  tree c h

post' :: IO [Int]
post' = do
  c   <- connectGargandb "gargantext.ini"
  pid <- last <$> home c
  let uid = 1
  postNode c uid pid ( Node' NodeCorpus (pack "Premier corpus") (toJSON (pack "{}" :: Text))
                             [ Node' Document (pack "Doc1") (toJSON (pack "{}" :: Text)) []
                             , Node' Document (pack "Doc2") (toJSON (pack "{}" :: Text)) []
                             , Node' Document (pack "Doc3") (toJSON (pack "{}" :: Text)) []
                             ]
                     )

type CorpusName = Text

-- | Example usage:
-- myCorpus <- Prelude.map doc2hyperdataDocument <$> toDocs <$> snd <$> readCsv "doc/corpus_imt/Gargantext_Corpus_small.csv"
-- There is an error in the CSV parsing, hence the filter on titles:
-- let myCorpus' = Prelude.filter (\n -> T.length (maybe "" identity (hyperdataDocument_title n)) > 30) myCorpus

postCorpus :: ToJSON a => CorpusName -> (a -> Text) -> [a] -> IO [Int]
postCorpus corpusName title ns = do
  c   <- connectGargandb "gargantext.ini"
  pid <- last <$> home c
  let uid = 1
  postNode c uid pid ( Node' NodeCorpus  corpusName (toJSON (pack "{}"::Text))
                             (map (\n -> Node' Document (title n) (toJSON n) []) ns)
                     )

-- | Example usage:
-- import IMTClient as C
-- postAnnuaire "Annuaire IMT" (\n -> (maybe "" identity (C.prenom n)) <> " " <> (maybe "" identity (C.nom n))) (take 30 annuaire)
postAnnuaire :: ToJSON a => CorpusName -> (a -> Text) -> [a] -> IO [Int]
postAnnuaire corpusName title ns = do
  c   <- connectGargandb "gargantext.ini"
  pid <- last <$> home c
  let uid = 1
  postNode c uid pid ( Node' Annuaire  corpusName (toJSON (pack "{}"::Text))
                             (map (\n -> Node' UserPage (title n) (toJSON n) []) ns)
                     )


del' :: [NodeId] -> IO Int
del' ns = do
  c <- connectGargandb "gargantext.ini"
  del c ns


-- corporaOf :: Username -> IO [Corpus]