Commit b04c7f62 authored by Andrew Gibiansky's avatar Andrew Gibiansky

get something working, i think (???)

parent 67e98e69
@@ -93,9 +93,6 @@ executable IHaskell
  extensions: DoAndIfThenElse
              NoImplicitPrelude
              OverloadedStrings
  -- Other library packages from which modules are imported.
  build-depends: base ==4.6.*,
@@ -126,5 +123,5 @@ Test-Suite doctests
  Type: exitcode-stdio-1.0
  Ghc-Options: -threaded
  Main-Is: rundoctests.hs
  Build-Depends: base, doctest >= 0.8, process
  Build-Depends: base, doctest >= 0.8, process, text ==0.11.*, shelly ==1.3.*, MissingH ==1.2.*
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
-- | Description : IPython configuration files are compiled-into IHaskell
module IHaskell.Config (ipython, notebook, console, qtconsole, customjs, notebookJavascript) where
......
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
module IHaskell.Display (
  IHaskellDisplay(..),
......
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{- | Description : generates tab-completion options
context-insensitive completion for what is probably context-insensitive completion for what is probably
......
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{- | Description : Wrapper around GHC API, exposing a single `evaluate` interface that runs
     a statement, declaration, import, or directive.
@@ -35,6 +34,7 @@ import Module
import qualified System.IO.Strict as StrictIO
import IHaskell.Types
import IHaskell.Eval.Parser

data ErrorOccurred = Success | Failure
@@ -79,21 +79,8 @@ makeWrapperStmts = (fileName, initStmts, postStmts)
write :: GhcMonad m => String -> m ()
write x = when debug $ liftIO $ hPutStrLn stderr x
type LineNumber = Int
type ColumnNumber = Int
type Interpreter = Ghc
data DirectiveType = GetType String deriving Show
data Command
= Directive DirectiveType
| Import String
| Declaration String
| Statement String
| ParseError LineNumber ColumnNumber String
deriving Show
globalImports :: [String]
globalImports =
  [ "import Prelude"
@@ -152,9 +139,11 @@ evaluate :: Int -- ^ The execution counter of this evaluat
         -> Interpreter [DisplayData] -- ^ All of the output.
evaluate execCount code
  | strip code == "" = return []
  | otherwise = joinDisplays <$> runUntilFailure (parseCommands (strip code) ++ [storeItCommand execCount])
  | otherwise = do
      cmds <- parseCommands (strip code)
      joinDisplays <$> runUntilFailure (cmds ++ [storeItCommand execCount])
  where
    runUntilFailure :: [Command] -> Interpreter [DisplayData]
    runUntilFailure :: [CodeBlock] -> Interpreter [DisplayData]
    runUntilFailure [] = return []
    runUntilFailure (cmd:rest) = do
      (success, result) <- evalCommand cmd
@@ -178,8 +167,12 @@ joinDisplays displays =
      _ -> joinedPlains : other
parseCommands :: GhcMonad m => String -- ^ Code containing commands.
-> m [CodeBlock] -- ^ Commands contained in code string.
parseCommands = parseCell
{-
parseCommands :: String      -- ^ Code containing commands.
              -> [Command]   -- ^ Commands contained in code string.
              -> [CodeBlock] -- ^ Commands contained in code string.
parseCommands code = joinMultilineDeclarations $ concatMap makeCommands pieces
  where
    -- Group the text into different pieces.
@@ -246,20 +239,21 @@ parseCommands code = joinMultilineDeclarations $ concatMap makeCommands pieces
      ':':'t':' ':expr -> Directive (GetType expr)
      other -> ParseError 0 0 $ "Unknown command: " ++ other ++ "."

joinMultilineDeclarations :: [Command] -> [Command]
joinMultilineDeclarations :: [CodeBlock] -> [CodeBlock]
joinMultilineDeclarations = map joinCommands . groupBy declaringSameFunction
  where
    joinCommands :: [Command] -> Command
    joinCommands :: [CodeBlock] -> CodeBlock
    joinCommands [x] = x
    joinCommands commands = Declaration . unlines $ map getDeclarationText commands
      where
        getDeclarationText (Declaration text) = text

    declaringSameFunction :: Command -> Command -> Bool
    declaringSameFunction :: CodeBlock -> CodeBlock -> Bool
    declaringSameFunction (Declaration first) (Declaration second) = declared first == declared second
      where declared :: String -> String
            declared = takeWhile (`notElem` (" \t\n:" :: String)) . strip
    declaringSameFunction _ _ = False
-}

wrapExecution :: Interpreter [DisplayData] -> Interpreter (ErrorOccurred, [DisplayData])
wrapExecution exec = ghandle handler $ exec >>= \res ->
@@ -270,7 +264,7 @@ wrapExecution exec = ghandle handler $ exec >>= \res ->
-- | Return the display data for this command, as well as whether it
-- resulted in an error.
evalCommand :: Command -> Interpreter (ErrorOccurred, [DisplayData])
evalCommand :: CodeBlock -> Interpreter (ErrorOccurred, [DisplayData])
evalCommand (Import importStr) = wrapExecution $ do
  write $ "Import: " ++ importStr
  importDecl <- parseImportDecl importStr
@@ -278,7 +272,7 @@ evalCommand (Import importStr) = wrapExecution $ do
  setContext $ IIDecl importDecl : context
  return []

evalCommand (Directive (GetType expr)) = wrapExecution $ do
evalCommand (Directive GetType expr) = wrapExecution $ do
  result <- exprType expr
  dflags <- getSessionDynFlags
  return [Display MimeHtml $ printf "<span style='font-weight: bold; color: green;'>%s</span>" $ showSDocUnqual dflags $ ppr result]
......
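The evalCommand signature change above tracks the new CodeBlock type from IHaskell.Eval.Parser: the directive's argument is now a separate String field rather than being packed inside DirectiveType. A minimal sketch of the resulting pattern match, with CodeBlock trimmed to two constructors for illustration (not code from the commit):

-- Sketch only: CodeBlock reduced to the constructors needed to show the match.
data DirectiveType = GetType | GetInfo deriving Show

data CodeBlock = Expression String
               | Directive DirectiveType String
               deriving Show

describeBlock :: CodeBlock -> String
describeBlock (Directive GetType expr) = "query the type of: " ++ expr   -- previously: Directive (GetType expr)
describeBlock (Directive GetInfo name) = "query info for: " ++ name
describeBlock (Expression expr)        = "evaluate: " ++ expr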
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE NoImplicitPrelude #-}
{- | Description : Inspect type and function information and documentation.
......
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
module IHaskell.Eval.Parser (
    chunkCode,
    CodeChunk(..),
    ChunkType(..),
    parseCell,
    CodeBlock(..),
    DirectiveType(..),
    LineNumber,
    ColumnNumber,
    splitAtLoc
    ) where

import ClassyPrelude hiding (liftIO, hGetContents)
import ClassyPrelude hiding (liftIO)
import Data.String.Utils (startswith, strip)
import Prelude (init, last)

import FastString
import StringBuffer
import ErrUtils
import SrcLoc
import GHC
import GhcMonad (liftIO)
import Bag
import Outputable hiding ((<>))
import Lexer
import OrdList

import Data.String.Utils (strip, startswith)
import Data.List.Utils (grab)
import Control.Monad.State
import Debug.Trace
import IHaskell.GHC.HaskellParser

-- | A chunk of code with with a source span and an associated chunk type.
data CodeChunk = Chunk RealSrcSpan ChunkType

-- | Possible types of code chunks.
data ChunkType = Directive
               | Expr
               | Stmt
               | Decl
               | Import
               deriving (Eq, Show, Ord)

-- | Simple tree data structure.
data Tree a = Branch [Tree a] | Leaf a deriving Show

-- | Delimiter categorization as an opening, closing, or neither opening
-- nor closing delimiter. Used to generate trees of tokens.
data DelimType = Opening | Closing | Neither

type LineNumber = Int
type ColumnNumber = Int

data CodeBlock
  = Expression String
  | Declaration String
  | Statement String
  | Import String
  | Directive DirectiveType String
  | ParseError LineNumber ColumnNumber String
  deriving Show

data DirectiveType
  = GetType
  | GetInfo
  deriving Show

-- | Put the given statements into a `do` block.
wrapInDoBlock :: String -> String
wrapInDoBlock codeStr =
  if null stripped
  then []
  else unlines $ "do" : map indent (lines stripped)
  where
    stripped = strip codeStr
    indent = (" " ++)

-- $setup
-- >>> import GHC
-- >>> import GHC.Paths
-- >>> import IHaskell.Eval.Parser
-- >>> let ghc = runGhc (Just libdir)
-- >>> let test = ghc . parseCell
-- $extendedParserTests
--
-- >>> test "let x = 3 in x + 3"
-- [Expression "let x = 3 in x + 3"]
--
-- >>> test "3\n:t expr"
-- [Expression "3",Directive GetType "expr"]
--
-- >>> test "3\nlet x = expr"
-- [Expression "3",Statement "let x = expr"]
--
-- >>> test "y <- do print 'no'\nlet x = expr"
-- [Statement "y <- do print 'no'",Statement "let x = expr"]
--
-- >>> test "y <- do print 'no'\nlet x = expr"
-- [Statement "y <- print 'no'",Statement "let x = expr"]
--
-- >>> test "print yes\nprint no"
-- [Expression "print yes",Statement "print no"]
-- | Parse a single cell into code blocks.
--
-- >>> test "let x = 3"
-- [Statement "let x = 3"]
--
-- >>> test ":type hello\n:in goodbye"
-- [Directive GetType "hello",Directive GetInfo "goodbye"]
--
-- >>> test "import Data.Monoid"
-- [Import "import Data.Monoid"]
--
-- >>> test "3 + 5"
-- [Expression "3 + 5"]
parseCell :: GhcMonad m => String -> m [CodeBlock]
parseCell codeString = concat <$> processChunks 1 [] chunks
  where
    chunks = splitOnDirectives [] $ lines codeString

    parseChunk chunk line =
      if isDirective chunk
      then return [parseDirective chunk line]
      else parseCell' chunk line

    isDirective = startswith ":" . strip

    processChunks _ results [] = return $ reverse results
    processChunks line accum (chunk:remaining) = do
      block <- parseChunk chunk line
      processChunks (line + nlines chunk) (block : accum) remaining

    splitOnDirectives results [] = reverse results
    splitOnDirectives chunks (line:lines) =
      if startswith ":" $ strip line
      then splitOnDirectives (line : chunks) lines
      else
        let goodLines = takeWhile (not . startswith ":" . strip) (line:lines)
            remaining = drop (length goodLines) (line:lines) in
          splitOnDirectives (unlines goodLines : chunks) remaining

    nlines = length . lines

parseCell' :: GhcMonad m => String -> Int -> m [CodeBlock]
parseCell' code startLine = do
  flags <- getSessionDynFlags
  let parseResults = map tryParser (parsers flags)
  case rights parseResults of
    [] -> return [ParseError startLine 0 "Failed"]
    (result, used, remaining):_ -> do
      remainResult <- parseCell' remaining $ startLine + length (lines used)
      return $ result : if null (strip remaining)
                        then []
                        else remainResult
  where
    tryParser :: (String -> CodeBlock, String -> (Either String String, String, String)) -> Either String (CodeBlock, String, String)
    tryParser (blockType, parser) = case parser code of
      (Left err, _, _) -> Left err
      (Right res, used, remaining) -> Right (blockType res, used, remaining)

    parsers flags =
      [ (Import, strParser flags partialImport)
      , (Expression, strParser flags partialExpression)
      , (Statement, strParser flags partialStatement)
      , (Declaration, lstParser flags partialDeclaration)
      ]

lstParser :: Outputable a => DynFlags -> P (OrdList a) -> String -> (Either String String, String, String)
lstParser flags parser code =
  case runParser flags parser code of
    Left err -> (Left err, code, "")
    Right (out, used, remainingCode) -> (Right . showSDoc flags . ppr . fromOL $ out, used, remainingCode)

strParser :: Outputable a => DynFlags -> P a -> String -> (Either String String, String, String)
strParser flags parser code =
  case runParser flags parser code of
    Left err -> (Left err, code, "")
    Right (out, used, remainingCode) -> (Right . showSDoc flags . ppr $ out, used, remainingCode)

-- | Parse a directive of the form :directiveName.
parseDirective :: String    -- ^ Directive string.
               -> Int       -- ^ Line number at which the directive appears.
               -> CodeBlock -- ^ Directive code block or a parse error.
parseDirective (':':directive) line = case find rightDirective directives of
  Just (directiveType, _) -> Directive directiveType arg
    where arg = unwords restLine
          _:restLine = words directive
  Nothing -> ParseError line 0 $ "Unknown command: '" ++ directive ++ "'."
  where
    rightDirective (_, strings) = case words directive of
      [] -> False
      dir:_ -> dir `elem` strings

    directives =
      [(GetType, ["t", "ty", "typ", "type"])
      ,(GetInfo, ["i", "in", "inf", "info"])
      ]

-- | Run a GHC parser on a string.
runParser :: DynFlags -> P a -> String -> Either String (a, String, String)
runParser dflags parser str = toEither (unP parser (mkPState dflags buffer location))
  where
    filename = "<interactive>"
    location = mkRealSrcLoc (mkFastString filename) 1 1
    buffer = stringToStringBuffer str

    toEither (PFailed span err) = Left $ printErrorBag $ unitBag $ mkPlainErrMsg dflags span err
    toEither (POk parseState result) =
      let parseEnd = loc parseState
          endLine = srcLocLine parseEnd
          endCol = srcLocCol parseEnd
          (before, after) = splitAtLoc endLine endCol str in
        Right (result, before, after)

    -- Convert the bag of errors into an error string.
    printErrorBag bag = unlines . map show $ bagToList bag

-- | Split a string at a given line and column.
--
-- >>> splitAtLoc 2 3 "abc\ndefghi\nxyz\n123"
-- ("abc\ndef","ghi\nxyz\n123")
--
-- >>> splitAtLoc 2 1 "abc"
-- ("abc","")
splitAtLoc :: LineNumber -> ColumnNumber -> String -> (String, String)
splitAtLoc line col string =
  if line > length (lines string)
  then (string, "")
  else (before, after)
  where
    (beforeLines, afterLines) = splitAt line $ lines string
    theLine = last beforeLines
    (beforeChars, afterChars) = splitAt col theLine

    before = joinLines (init beforeLines) ++ '\n' : beforeChars
    after = afterChars ++ '\n' : joinLines afterLines

    -- Not the same as 'unlines', due to trailing \n
    joinLines = intercalate "\n"

-- | Convert a string of code into raw, uncleaned code chunks.
classifyCode :: DynFlags -> String -> Either String [CodeChunk]
classifyCode flags codeStr = groupLikeChunks . treeToChunks . tokenTree <$> runLexer flags (wrapInDoBlock codeStr)

-- | Group code chunks that are alike into one code chunk.
groupLikeChunks :: [CodeChunk] -> [CodeChunk]
groupLikeChunks chunks = map joinChunks $ groupBy sameChunkType chunks
  where
    sameChunkType (Chunk _ firstType) (Chunk _ secondType) = firstType == secondType

    joinChunks :: [CodeChunk] -> CodeChunk
    joinChunks [chunk] = chunk
    joinChunks (Chunk firstLoc chunkType:rest) = Chunk newSpan chunkType
      where newSpan = mkRealSrcSpan (realSrcSpanStart firstLoc) (realSrcSpanEnd restChunkLoc)
            Chunk restChunkLoc _ = joinChunks rest

-- | Convert a list of tokens into a tree of tokens.
tokenTree :: [Located Lexer.Token] -> Tree (Located Lexer.Token)
tokenTree = toTree $ \x -> case x of
  -- Opening delimiters are opening curly braces and opening parentheses.
  L _ ITvocurly -> Opening
  L _ IToparen -> Opening

  -- Closing delimiters are closing curly braces and closing parentheses.
  L _ ITvccurly -> Closing
  L _ ITcparen -> Closing

  -- Everthing else isn't a delimiter.
  _ -> Neither

-- | Convert a list into a tree given a function that can classify each
-- element of the list as a delimiter (opening or closing) or not
-- a delimiter.
toTree :: (a -> DelimType) -- ^ Function which classifies the delimiter type of a list element.
       -> [a]              -- ^ List of tokens.
       -> Tree a           -- ^ Tree generated from tokens where each set of delimiters encodes a new level.
toTree delimType tokens =
  case toTree' delimType 0 [[]] tokens of
    x -> Branch $ reverse x
  where
    -- Helper function for tree conversion.
    toTree' :: (a -> DelimType) -- Convert list element to a tree.
            -> Int              -- The level of the tree which is being parsed.
            -> [[Tree a]]       -- The currently parsed branches at every level.
                                -- The first element is a list of branches
                                -- at the level currently being parsed, the
                                -- second element is the branches at the
                                -- level above, and so on.
            -> [a]              -- Remaining tokens.
            -> [Tree a]         -- Branches of the output tree.
    toTree' delimType n accum (token:rest) =
      case delimType token of
        -- If we see an opening delimiter, go down one level.
        -- Reset the parsed things at the current level to nothing, since
        -- we haven't parsed any tokens.
        Opening -> toTree' delimType (n+1) ([] : accum) rest

        -- If we see a closing parenthesis, go back up one level.
        -- The level below just becomes a single parsed token at this level.
        Closing -> case accum of
          sublevel : currentLevel : uplevels -> toTree' delimType (n-1) levels rest
            where first = Branch $ reverse sublevel
                  currentLevelNodes = first : currentLevel
                  levels = currentLevelNodes : uplevels

        -- If we see something that isn't a delimiter, simply add it to the
        -- current level of parsed nodes.
        Neither -> case accum of
          currentLevel : uplevels -> toTree' delimType n ((Leaf token : currentLevel) : uplevels) rest

    -- Once done parsing, return the branches. We're done paring because
    -- the remaining tokens are empty and because the level of the tree is
    -- just zero (the top level).
    toTree' _ 0 (a:_) [] = a

-- | Divide the code string into chunks. Each code chunk can be evaluated
-- separately.
chunkCode :: GhcMonad m => String                    -- ^ String containing code to parse and split.
          -> m (Either String [(String, ChunkType)]) -- ^ Either an error string or a list of code chunks.
chunkCode codeString = do
  flags <- getSessionDynFlags
  let chunks = classifyCode flags codeString
  return $ case chunks of
    Right chunks -> Right $ evalState (extractDirectives chunks) $ lines codeString
    Left str -> Left str
  where
    -- Get number of lines in a source span.
    nlines :: RealSrcSpan -> Int
    nlines span = 1 + srcLocLine (realSrcSpanEnd span) - srcLocLine (realSrcSpanStart span)

    -- Extract all directives in this chunk. Convert a chunk into a list of
    -- strings and their chunk types.
    extractDirectives :: [CodeChunk] -> State [String] [(String, ChunkType)]
    extractDirectives (Chunk span chunkType:rest) = do
      spanLines <- grab $ nlines span
      next <- extractDirectives rest
      return $ catchDirectives spanLines chunkType ++ next
      where
        catchDirectives :: [String] -> ChunkType -> [(String, ChunkType)]
        catchDirectives codeLines chunkType = case break isDirective codeLines of
          -- If there are no directives...
          (allLines, []) -> [(unlines allLines, chunkType)]
          (preLines, directiveLine:postLines) -> [(unlines preLines, chunkType), (directiveLine, Directive)] ++ catchDirectives postLines chunkType

        isDirective line = startswith ":" $ strip line

    extractDirectives [] = return []

-- | only go down one level
treeToChunks :: Tree (Located Lexer.Token) -> [CodeChunk]
treeToChunks = convertToChunks (convertToChunks leafToChunk)
  where
    convertToChunks recursion (Branch subnodes) = concatMap recursion subnodes
    convertToChunks _ (Leaf value) = makeChunk value

    leafToChunk (Leaf value) = makeChunk value
    leafToChunk _ = []

    makeChunk (L (RealSrcSpan location) token) = [Chunk location $ classifyToken token]
    makeChunk _ = []
-- | Classifies a token based on what type of Haskell form it is likely to
-- be part of. Certain tokens can mean you are in an import or in
-- a declaration. However, you can have declarations inside expressions or
-- statements when between curly brackets. After lexing the input, lines
-- are classified based on tokens using `classifyToken` and then the
-- original input is split based on these classifications.
classifyToken :: Lexer.Token -> ChunkType
classifyToken tok = case tok of
ITclass -> Decl
ITdata -> Decl
ITdefault -> Decl
ITderiving -> Decl
IThiding -> Decl
ITimport -> Import
ITinfix -> Decl
ITinfixl -> Decl
ITinfixr -> Decl
ITinstance -> Decl
ITmodule -> Decl
ITnewtype -> Decl
ITqualified -> Import
ITtype -> Decl
ITwhere -> Decl
ITscc -> Decl
ITforeign -> Decl
ITexport -> Decl
ITlabel -> Decl -- ?
ITdynamic -> Decl
ITsafe -> Decl
ITinterruptible -> Decl
ITunsafe -> Decl
ITstdcallconv -> Decl
ITccallconv -> Decl
ITcapiconv -> Decl
ITprimcallconv -> Decl
ITfamily -> Decl
ITinline_prag {} -> Decl
ITspec_prag {} -> Decl
ITspec_inline_prag {} -> Decl
ITsource_prag {} -> Decl
ITrules_prag {} -> Decl
ITwarning_prag {} -> Decl
ITdeprecated_prag {} -> Decl
ITline_prag {} -> Decl
ITscc_prag -> Decl
ITgenerated_prag -> Decl
ITcore_prag -> Decl
ITunpack_prag -> Decl
ITnounpack_prag -> Decl
ITann_prag -> Decl
ITclose_prag -> Decl
IToptions_prag {} -> Decl
ITinclude_prag {} -> Decl
ITlanguage_prag -> Decl
ITvect_prag -> Decl -- ?
ITvect_scalar_prag -> Decl
ITnovect_prag -> Decl
ITctype -> Decl
ITdcolon -> Decl
ITequal -> Decl
ITvbar -> Decl -- |
ITdotdot -> Expr -- [1 .. ]
ITcolon -> Expr
ITcase -> Expr
ITdo -> Expr
ITelse -> Expr
ITif -> Expr
ITin -> Expr
ITlet -> Expr
ITof -> Expr
ITthen -> Expr
ITforall -> Expr
ITmdo -> Expr
ITgroup -> Expr -- SQL comprehensions.
ITby -> Expr
ITusing -> Expr
ITlam -> Expr
ITlcase -> Expr
ITlarrow -> Expr
ITrarrow -> Expr
ITat -> Expr
ITtilde -> Expr
ITtildehsh -> Expr
ITdarrow -> Expr
ITminus -> Expr
ITbang -> Expr
ITstar -> Expr
ITdot -> Expr
ITbiglam -> Expr
ITocurly -> Expr
ITccurly -> Expr
ITvocurly -> Expr
ITvccurly -> Expr
ITobrack -> Expr
ITopabrack -> Expr
ITcpabrack -> Expr
ITcbrack -> Expr
IToparen -> Expr
ITcparen -> Expr
IToubxparen -> Expr
ITcubxparen -> Expr
ITsemi -> Expr
ITcomma -> Expr
ITunderscore -> Expr
ITbackquote -> Expr
ITsimpleQuote -> Expr
ITvarid {} -> Expr
ITconid {} -> Expr
ITvarsym {} -> Expr
ITconsym {} -> Expr
ITqvarid {} -> Expr
ITqconid {} -> Expr
ITqvarsym {} -> Expr
ITqconsym {} -> Expr
ITprefixqvarsym {} -> Expr
ITprefixqconsym {} -> Expr
ITdupipvarid {} -> Expr
ITchar {} -> Expr
ITstring {} -> Expr
ITinteger{} -> Expr
ITrational{} -> Expr
ITprimchar {} -> Expr
ITprimstring{} -> Expr
ITprimint {} -> Expr
ITprimword {} -> Expr
ITprimfloat {} -> Expr
ITprimdouble {} -> Expr
ITopenExpQuote -> Expr
ITopenPatQuote -> Expr
ITopenDecQuote -> Expr
ITopenTypQuote -> Expr
ITcloseQuote -> Expr
ITidEscape {} -> Expr
ITparenEscape -> Expr
ITtyQuote -> Expr
ITquasiQuote {} -> Expr
ITqQuasiQuote {} -> Expr
ITproc -> Expr
ITrec -> Expr
IToparenbar -> Expr
ITcparenbar -> Expr
ITlarrowtail -> Expr
ITrarrowtail -> Expr
ITLarrowtail -> Expr
ITRarrowtail -> Expr
ITunknown {} -> Expr
ITeof -> Expr
ITdocCommentNext {} -> Expr
ITdocCommentPrev {} -> Expr
ITdocCommentNamed {} -> Expr
ITdocSection {} -> Expr
ITdocOptions {} -> Expr
ITdocOptionsOld {} -> Expr
ITlineComment {} -> Expr
ITblockComment {} -> Expr
-- All constructors are listed above.
-- A new keyword addition to GHC will trigger a warning here.
-- | Runs the GHC lexer on the code string. Returns an error string or
-- a list of tokens and locations for each token.
runLexer :: DynFlags -> String -> Either String [Located Token]
runLexer flags codeString = toEither (lexTokenStream buffer location flags)
  where
    -- Location displayed as the parsing location.
    filename = "<interactive>"
    initLine = 1
    initCol = 1
    location = mkRealSrcLoc (mkFastString filename) initLine initCol
    buffer = stringToStringBuffer codeString

    -- Convert a parse success or failure into an Either type.
    toEither (PFailed span err) = Left $ printErrorBag $ unitBag $ mkPlainErrMsg flags span err
    toEither (POk _ tokens) = Right tokens

    printErrorBag bag = unlines $ map show $ bagToList bag
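The $setup block in the new module shows the intended way to drive parseCell from IO. A minimal, self-contained sketch along those lines (it assumes the ghc-paths package for libdir, as the doctests do; the input string is only illustrative):

import GHC (runGhc)
import GHC.Paths (libdir)
import IHaskell.Eval.Parser (parseCell)

main :: IO ()
main = do
  -- Per the doctests above, this should yield an Expression, a GetType directive, and an Import.
  blocks <- runGhc (Just libdir) (parseCell "3 + 5\n:t id\nimport Data.Monoid")
  mapM_ print blocks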
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
-- | Description : Shell scripting wrapper using @Shelly@ for the @notebook@, @setup@, and
-- @console@ commands.
module IHaskell.IPython (
    runIHaskell,
    setupIPythonProfile,
    ipythonVersion
    ipythonVersion,
    parseVersion
    ) where

import ClassyPrelude
@@ -19,6 +21,10 @@ import qualified System.IO.Strict as StrictIO
import qualified IHaskell.Config as Config

-- $setup
-- >>> import ClassyPrelude
-- >>> import IHaskell.IPython

-- | Run IPython with any arguments.
ipython :: Bool   -- ^ Whether to suppress output.
        -> [Text] -- ^ IPython command line arguments.
@@ -44,13 +50,10 @@ ipythonVersion = shelly $ do
  [major, minor, patch] <- parseVersion <$> ipython True ["--version"]
  return (major, minor, patch)

{- |
>>> parseVersion `map` ["2.0.0-dev", "2.0.0-alpha", "12.5.10"]
[[2,0,0],[2,0,0],[12,5,10]]
-}
-- | Parse an IPython version string into a list of integers.
--
-- >>> parseVersion `map` ["2.0.0-dev", "2.0.0-alpha", "12.5.10"]
-- [[2,0,0],[2,0,0],[12,5,10]]
parseVersion :: String -> [Int]
parseVersion versionStr = map read' $ split "." versionStr
  where read' x = case reads x of
......
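The parseVersion doctest above works because reads stops at the first character that cannot be part of an Int, which is how "2.0.0-dev" still yields [2,0,0]. A standalone sketch of the same idea, using Data.List.Split's splitOn instead of the split function MissingH provides, and with an illustrative fallback of 0 (the commit's own fallback case is truncated above):

import Data.List.Split (splitOn)

parseVersion' :: String -> [Int]
parseVersion' = map read' . splitOn "."
  where read' x = case reads x of
          [(n, _)] -> n   -- "0-dev" parses as 0; the "-dev" remainder is ignored
          _        -> 0   -- illustrative fallback, not taken from the commit

-- >>> parseVersion' "2.0.0-dev"
-- [2,0,0]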
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
-- | Description : UUID generator and data structure
--
-- Generate, parse, and pretty print UUIDs for use with IPython.
......
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE CPP #-}
-- | Description : @ToJSON@ for Messages
--
@@ -18,6 +19,7 @@ ghcVersionInts :: [Int]
ghcVersionInts = ints . map read . words . map dotToSpace $ (VERSION_ghc :: String)
  where dotToSpace '.' = ' '
        dotToSpace x = x
--ghcVersionInts = [7,6,3]

-- Convert message bodies into JSON.
instance ToJSON Message where
......
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
-- | Description : Low-level ZeroMQ communication wrapper.
--
-- The "ZeroMQ" module abstracts away the low-level 0MQ based interface with IPython,
......
{-# LANGUAGE OverloadedStrings #-}
import System.Process
import System.Exit
import System.IO
import Test.DocTest
import Data.Char
import System.Environment
import Data.String.Utils

-- | tests that all the >>> comments are followed by correct output. Easiest is to
--
@@ -18,17 +17,24 @@ import System.Environment
-- > runghc examples/rundoctests.hs Data/HList/File1.hs Data/HList/File2.hs
--
-- you need Cabal >= 1.18 since that's around when cabal repl got added.
main :: IO ()
main = do
  as <- getArgs
  o <- readProcess
         "cabal" ["repl","--ghc-options","-v0 -w"]
         ":show packages\n:show language"
  let flags = words $ unlines $ filter ((=="-") . take 1 . dropWhile isSpace)
            $ lines o
  let files = case as of

  -- Get files to run on.
  args <- getArgs

  -- Get flags via cabal repl.
  let cabalCmds = unlines [":show packages", ":show language"]
      cabalOpts = ["repl","--ghc-options","-v0 -w"]
  options <- readProcess "cabal" cabalOpts cabalCmds

  let extraFlags = ["-fobject-code", "-XNoImplicitPrelude"]
      flags = words (unlines $ filter (startswith "-" . strip) $ lines options) ++ extraFlags

  let files = case args of
        [] -> ["Main.hs"]
        _ -> as
        _ -> args

  putStrLn "Testing:\n--------"
  mapM_ putStrLn files
  putStr "\n"

  doctest $ "-i.": "-idist/build/autogen":
            "-optP-include":
......