Compare commits

..

2 Commits

Author SHA1 Message Date
Patrick Aldis
be5e487e31 refactor repo to datalog/datalog-lsp 2026-03-02 16:19:40 +00:00
Patrick Aldis
a42a546586 Hover displays the contents of the document 2026-02-27 12:07:52 +00:00
4 changed files with 206 additions and 28 deletions

View File

@ -1,39 +1,41 @@
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Lens ((^.))
import Control.Monad.IO.Class
import Data.Either (fromRight)
import Data.Maybe (fromJust)
import Language.LSP.Protocol.Lens (params, textDocument, uri)
import Language.LSP.Protocol.Message
import Language.LSP.Protocol.Types
import Language.LSP.Server
import Data.Either (fromRight)
import Language.LSP.VFS (virtualFileText)
-- | LSP request/notification handlers for the server.
--
-- NOTE(review): this span is a rendered diff that interleaves OLD and NEW
-- lines of the same function without +/- markers (two destructurings of
-- @req@, two bindings of @rsp@, two bindings of @token@).  It is not
-- compilable as shown; comments below describe the intent of the newer
-- (hover-from-document) version.
handlers :: Handlers (LspM ())
handlers =
mconcat
-- Acknowledge the client's "initialized" notification; nothing to do.
[ notificationHandler SMethod_Initialized $ \_ -> pure ()
-- Hover: respond with the full text of the document under the cursor.
, requestHandler SMethod_TextDocumentHover $ \req responder -> do
-- OLD version of the destructuring (superseded by the lens-based
-- docUri binding below — diff artifact).
let TRequestMessage _ _ _ (HoverParams _doc pos _workDone) = req
let
-- Normalized URI of the document the hover request targets.
docUri = toNormalizedUri $ req ^. (params . textDocument . uri)
TRequestMessage _ _ _ (HoverParams _doc pos _workDone) = req
Position _l _c' = pos
-- OLD: constant "Hello world" hover (diff artifact).
rsp = Hover (InL ms) (Just range)
ms = mkMarkdown "Hello world"
-- NEW: build a markdown hover from arbitrary text.
rsp txt = Hover (InL . mkMarkdown $ txt) (Just range)
-- Zero-width range anchored at the cursor position.
range = Range pos pos
-- OLD responder call for the constant hover (diff artifact).
responder (Right $ InL rsp)
-- NOTE(review): fromJust is partial — this crashes if the VFS has no
-- entry for docUri (e.g. the document was never opened/synced).
c <- fromJust <$> getVirtualFile docUri
-- Hover content is the entire current text of the virtual file.
responder (Right . InL . rsp . virtualFileText $ c)
-- Semantic tokens: return one hard-coded example token.
, requestHandler SMethod_TextDocumentSemanticTokensFull $ \req responder -> do
let TRequestMessage _ _ _ (SemanticTokensParams _doc _workDone _partial) = req
-- Each token is encoded as 5 uints:
-- [deltaLine, deltaStartChar, length, tokenTypeIndex, tokenModifiersBitset]
-- This example returns a single token at (0,0) of length 5.
let -- tokenTypeIndex=0 is "whatever the legend's 0 is"
-- OLD record-syntax binding of the same token (diff artifact).
token = SemanticTokenAbsolute {
_line = 0,
_startChar = 0,
_length = 5,
_tokenType = SemanticTokenTypes_Keyword,
_tokenModifiers = []
}
docUri = toNormalizedUri $ req ^. (params . textDocument . uri)
-- NEW formatting of the same single keyword token at (0,0), length 5.
token =
SemanticTokenAbsolute
{ _line = 0
, _startChar = 0
, _length = 5
, _tokenType = SemanticTokenTypes_Keyword
, _tokenModifiers = []
}
-- NOTE(review): fromRight (error "failed") turns an encoding failure
-- into a runtime crash rather than an LSP error response.
tokens = fromRight (error "failed") $ makeSemanticTokens defaultSemanticTokensLegend [token]
-- NOTE(review): 'c' is bound but never used in this handler — looks
-- like a stray addition from the hover change; confirm intent.
c <- fromJust <$> getVirtualFile docUri
responder (Right $ InL tokens)
]

View File

@ -0,0 +1,71 @@
module Datalog.Parser
( parseTerm
, parseAtom
, parseRule
, parseQuery
, parseProgram
)
where
import Datalog.Syntax
import Data.Text (Text)
import Data.Text qualified as T
import Data.Void (Void)
import Text.Megaparsec
import Text.Megaparsec.Char
import Text.Megaparsec.Char.Lexer qualified as L
-- | Concrete parser type: 'Text' input, no custom error component.
type Parser = Parsec Void Text

-- | Consume spaces plus "--" line comments and "{- -}" block comments.
whitespace :: (MonadParsec e Text m) => m ()
whitespace =
  L.space space1 (L.skipLineComment "--") (L.skipBlockComment "{-" "-}")

-- | Run @p@, then swallow any trailing whitespace.
lexeme :: (MonadParsec e Text m) => m a -> m a
lexeme p = L.lexeme whitespace p

-- | Match a literal piece of text, swallowing trailing whitespace.
symbol :: (MonadParsec e Text m) => Text -> m Text
symbol s = L.symbol whitespace s

-- | Wrap a parser in "(" and ")".
parens :: (MonadParsec e Text m) => m a -> m a
parens p = symbol "(" *> p <* symbol ")"

-- | Punctuation tokens, result discarded.
comma, period :: (MonadParsec e Text m) => m ()
comma = symbol "," >> pure ()
period = symbol "." >> pure ()
-- | A constant: a digit or lowercase letter followed by alphanumerics.
parseCon :: (MonadParsec e Text m) => m Term
parseCon = lexeme $ do
  first <- numberChar <|> lowerChar
  rest <- many alphaNumChar
  pure (Con (ConId (T.pack (first : rest))))

-- | A variable: an uppercase letter followed by alphanumerics.
parseVar :: (MonadParsec e Text m) => m Term
parseVar = lexeme $ do
  first <- upperChar
  rest <- many alphaNumChar
  pure (Var (VarId (T.pack (first : rest))))

-- | A term is a variable or a constant; variables are tried first.
parseTerm :: Parser Term
parseTerm = choice [parseVar, parseCon]
-- | An atom: a relation name applied to a parenthesised term list.
parseAtom :: Parser Atom
parseAtom = do
  name <- lexeme (liftA2 (:) lowerChar (many alphaNumChar))
  let rel = RelId (T.pack name)
  Atom rel <$> parens (parseTerm `sepBy` comma)

-- | A query: zero or more comma-separated atoms.
parseQuery :: Parser [Atom]
parseQuery = sepBy parseAtom comma
-- | A fact: a bare atom terminated by a period, e.g. @edge(a,b).@.
parseFact :: Parser Rule
parseFact = do
  headAtom <- parseAtom
  period
  return (headAtom :- [])

-- | A rule: @head :- body.@ or a plain fact @head.@.
--
-- The head atom is parsed exactly once and the @:-@ body is optional,
-- so the previous @try parseFact <|> ...@ — which re-parsed the whole
-- head atom whenever the input turned out to be a rule with a body —
-- is no longer needed.  The accepted language is unchanged.
parseRule :: Parser Rule
parseRule = do
  headAtom <- parseAtom
  bodyAtoms <- option [] (symbol ":-" *> parseQuery)
  period
  return (headAtom :- bodyAtoms)
-- | A whole program: leading whitespace/comments, then zero or more rules.
--
-- The leading 'whitespace' is consumed explicitly because 'lexeme' only
-- strips /trailing/ whitespace.  Without it, input beginning with spaces
-- or a comment made @many parseRule@ match nothing, and (since there is
-- no 'eof' here) the parser silently succeeded with an empty 'Program',
-- dropping every rule in the file.
parseProgram :: Parser Program
parseProgram = whitespace *> (Program <$> many parseRule)

View File

@ -0,0 +1,86 @@
module Datalog.Syntax where
import Data.Char (isUpper)
import Data.Text (Text)
import Data.Text qualified as T
import Data.Set qualified as Set
import Data.Set (Set)
-- | Identifier naming a constant symbol.
newtype ConId = ConId Text
  deriving (Eq, Ord, Show)

-- | Identifier naming a variable.
newtype VarId = VarId Text
  deriving (Eq, Ord, Show)

-- | Identifier naming a relation.
newtype RelId = RelId Text
  deriving (Eq, Ord, Show)

-- | A term is either a constant or a variable.
data Term = Con ConId | Var VarId
  deriving (Eq, Ord, Show)

-- | Build a constant term from raw text.
con :: Text -> Term
con = Con . ConId

-- | Build a variable term from raw text.
var :: Text -> Term
var = Var . VarId

-- | Classify raw text as a term: a leading uppercase letter means a
-- variable; anything else (including the empty string) is a constant.
term :: Text -> Term
term t = case T.uncons t of
  Just (c, _) | isUpper c -> var t
  _ -> con t

-- | An atom: a relation applied to a list of terms.
data Atom = Atom RelId [Term]
  deriving (Eq, Ord, Show)

-- | Build an atom, classifying each argument with 'term'.
atom :: Text -> [Text] -> Atom
atom relName = Atom (RelId relName) . map term

-- | A rule @h :- b@; a fact is a rule with an empty body.
data Rule = Atom :- [Atom]
  deriving (Eq, Ord, Show)

-- | A program is a list of rules.
data Program = Program [Rule]
  deriving (Eq, Ord, Show)
-- | Things from which the set of constant symbols can be collected.
class HasConstants a where
  constants :: a -> Set ConId

instance HasConstants Term where
  constants (Con c) = Set.singleton c
  constants (Var _) = Set.empty

instance (HasConstants a) => HasConstants [a] where
  -- Set's Monoid is union, so foldMap unions the per-element sets.
  constants = foldMap constants

instance HasConstants Atom where
  constants (Atom _ ts) = constants ts

instance HasConstants Rule where
  constants (h :- b) = constants h <> constants b

instance HasConstants Program where
  constants (Program rs) = constants rs
-- | Render syntax back to concrete Datalog source text.
class Pretty t where
  pretty :: t -> Text

instance Pretty ConId where
  pretty (ConId t) = t

instance Pretty VarId where
  pretty (VarId t) = t

instance Pretty RelId where
  pretty (RelId t) = t

instance Pretty Term where
  pretty t = case t of
    Con c -> pretty c
    Var v -> pretty v

instance Pretty Atom where
  -- rel(t1,t2,...) with no spaces between arguments.
  pretty (Atom r ts) =
    mconcat [pretty r, "(", T.intercalate "," (map pretty ts), ")"]

instance Pretty Rule where
  -- Facts print as "h."; rules as "h :- b1, b2."
  pretty (h :- body) = case body of
    [] -> pretty h <> "."
    _ -> pretty h <> " :- " <> T.intercalate ", " (map pretty body) <> "."

instance Pretty Program where
  -- One rule per line, trailing newline included by T.unlines.
  pretty (Program rs) = T.unlines (map pretty rs)

View File

@ -15,13 +15,32 @@ extra-doc-files: CHANGELOG.md
common warnings
ghc-options: -Wall
executable geolog-lsp
import: warnings
main-is: Main.hs
-- other-modules:
-- other-extensions:
build-depends: base,
lsp,
text
hs-source-dirs: src
common extensions
default-extensions:
OverloadedRecordDot
OverloadedStrings
library datalog-parser
import: warnings, extensions
build-depends:
base,
text,
containers,
megaparsec
exposed-modules:
Datalog.Parser,
Datalog.Syntax
hs-source-dirs: datalog/src
default-language: GHC2024
executable datalog-lsp
import: warnings, extensions
main-is: Main.hs
build-depends:
base,
lsp,
text,
containers,
lens
hs-source-dirs: datalog-lsp/src
default-language: GHC2024