diff --git a/parser-typechecker/src/Unison/PrintError.hs b/parser-typechecker/src/Unison/PrintError.hs
index 435bb2c2d3..3a6c26b664 100644
--- a/parser-typechecker/src/Unison/PrintError.hs
+++ b/parser-typechecker/src/Unison/PrintError.hs
@@ -20,11 +20,13 @@ import Unison.Builtin.Decls (unitRef, pattern TupleType')
 import Unison.Codebase.Path qualified as Path
 import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
 import Unison.HashQualified (HashQualified)
+import Unison.HashQualified' qualified as HQ'
 import Unison.Kind (Kind)
 import Unison.Kind qualified as Kind
 import Unison.KindInference.Error.Pretty (prettyKindError)
 import Unison.Name (Name)
 import Unison.Name qualified as Name
+import Unison.NameSegment (NameSegment (..))
 import Unison.Names qualified as Names
 import Unison.Names.ResolutionResult qualified as Names
 import Unison.Parser.Ann (Ann (..))
@@ -1629,16 +1631,17 @@ renderParseErrors s = \case
               then unknownTypesMsg
               else unknownTypesMsg <> "\n\n" <> dupDataAndAbilitiesMsg
        in (msgs, allRanges)
-    go (Parser.DidntExpectExpression _tok (Just t@(L.payload -> L.SymbolyId "::" Nothing))) =
-      let msg =
-            mconcat
-              [ "This looks like the start of an expression here but I was expecting a binding.",
-                "\nDid you mean to use a single " <> style Code ":",
-                " here for a type signature?",
-                "\n\n",
-                tokenAsErrorSite s t
-              ]
-       in (msg, [rangeForToken t])
+    go (Parser.DidntExpectExpression _tok (Just t@(L.payload -> L.SymbolyId (HQ'.NameOnly name))))
+      | name == Name.fromSegment (NameSegment "::") =
+          let msg =
+                mconcat
+                  [ "This looks like the start of an expression here but I was expecting a binding.",
+                    "\nDid you mean to use a single " <> style Code ":",
+                    " here for a type signature?",
+                    "\n\n",
+                    tokenAsErrorSite s t
+                  ]
+           in (msg, [rangeForToken t])
     go (Parser.DidntExpectExpression tok _nextTok) =
       let msg =
             mconcat
diff --git a/parser-typechecker/src/Unison/Syntax/DeclParser.hs b/parser-typechecker/src/Unison/Syntax/DeclParser.hs
index 172509e7b9..fd6b485f3c 100644
--- a/parser-typechecker/src/Unison/Syntax/DeclParser.hs
+++ b/parser-typechecker/src/Unison/Syntax/DeclParser.hs
@@ -6,7 +6,6 @@ where
 import Control.Lens
 import Control.Monad.Reader (MonadReader (..))
 import Data.Map qualified as Map
-import Data.Text qualified as Text
 import Text.Megaparsec qualified as P
 import Unison.ABT qualified as ABT
 import Unison.DataDeclaration (DataDeclaration, EffectDeclaration)
@@ -113,11 +112,11 @@ modifier = do
   where
     unique = do
       tok <- openBlockWith "unique"
-      optional (openBlockWith "[" *> wordyIdString <* closeBlock) >>= \case
+      optional (openBlockWith "[" *> importWordyId <* closeBlock) >>= \case
        Nothing -> do
          guid <- uniqueName 32
          pure (UnresolvedModifier'UniqueWithoutGuid guid <$ tok)
-        Just guid -> pure (UnresolvedModifier'UniqueWithGuid (Text.pack (L.payload guid)) <$ tok)
+        Just guid -> pure (UnresolvedModifier'UniqueWithGuid (Name.toText (L.payload guid)) <$ tok)
     structural = do
       tok <- openBlockWith "structural"
       pure (UnresolvedModifier'Structural <$ tok)
diff --git a/parser-typechecker/src/Unison/Syntax/FileParser.hs b/parser-typechecker/src/Unison/Syntax/FileParser.hs
index 9390053e49..dc812c972b 100644
--- a/parser-typechecker/src/Unison/Syntax/FileParser.hs
+++ b/parser-typechecker/src/Unison/Syntax/FileParser.hs
@@ -9,6 +9,7 @@ import Unison.ABT qualified as ABT
 import Unison.DataDeclaration (DataDeclaration)
 import Unison.DataDeclaration qualified as DD
 import Unison.Name qualified as Name
+import Unison.NameSegment (NameSegment (..))
 import Unison.Names qualified as Names
 import Unison.Names.ResolutionResult qualified as Names
 import Unison.NamesWithHistory qualified as Names
@@ -16,7 +17,7 @@ import Unison.Parser.Ann (Ann)
 import Unison.Prelude
 import Unison.Syntax.DeclParser (declarations)
 import Unison.Syntax.Lexer qualified as L
-import Unison.Syntax.Name qualified as Name (unsafeFromVar)
+import Unison.Syntax.Name qualified as Name (toString, unsafeFromVar)
 import Unison.Syntax.Parser
 import Unison.Syntax.TermParser qualified as TermParser
 import Unison.Term (Term)
@@ -216,10 +217,10 @@ stanza = watchExpression <|> unexpectedAction <|> binding
 
 watched :: (Monad m, Var v) => P v m (UF.WatchKind, Text, Ann)
 watched = P.try do
-  kind <- optional wordyIdString
+  kind <- (fmap . fmap . fmap) Name.toString (optional importWordyId)
   guid <- uniqueName 10
-  op <- optional (L.payload <$> P.lookAhead symbolyIdString)
-  guard (op == Just ">")
+  op <- optional (L.payload <$> P.lookAhead importSymbolyId)
+  guard (op == Just (Name.fromSegment (NameSegment ">")))
   tok <- anyToken
   guard $ maybe True (`L.touches` tok) kind
   pure (maybe UF.RegularWatch L.payload kind, guid, maybe mempty ann kind <> ann tok)
diff --git a/parser-typechecker/src/Unison/Syntax/TermParser.hs b/parser-typechecker/src/Unison/Syntax/TermParser.hs
index c6c27de716..ae63b032c3 100644
--- a/parser-typechecker/src/Unison/Syntax/TermParser.hs
+++ b/parser-typechecker/src/Unison/Syntax/TermParser.hs
@@ -30,8 +30,11 @@ import Unison.Builtin.Decls qualified as DD
 import Unison.ConstructorReference (ConstructorReference, GConstructorReference (..))
 import Unison.ConstructorType qualified as CT
 import Unison.HashQualified qualified as HQ
+import Unison.HashQualified' qualified as HQ'
 import Unison.Name (Name)
 import Unison.Name qualified as Name
+import Unison.NameSegment (NameSegment (..))
+import Unison.NameSegment qualified as NameSegment
 import Unison.Names (Names)
 import Unison.Names qualified as Names
 import Unison.NamesWithHistory qualified as Names
@@ -402,16 +405,22 @@ hashQualifiedPrefixTerm = resolveHashQualified =<< hqPrefixId
 hashQualifiedInfixTerm :: (Monad m, Var v) => TermP v m
 hashQualifiedInfixTerm = resolveHashQualified =<< hqInfixId
 
-quasikeyword :: (Ord v) => String -> P v m (L.Token ())
+quasikeyword :: Ord v => Text -> P v m (L.Token ())
 quasikeyword kw = queryToken \case
-  L.WordyId s Nothing | s == kw -> Just ()
+  L.WordyId (HQ'.NameOnly n) | nameIsKeyword n kw -> Just ()
   _ -> Nothing
 
-symbolyQuasikeyword :: (Ord v) => String -> P v m (L.Token ())
+symbolyQuasikeyword :: (Ord v) => Text -> P v m (L.Token ())
 symbolyQuasikeyword kw = queryToken \case
-  L.SymbolyId s Nothing | s == kw -> Just ()
+  L.SymbolyId (HQ'.NameOnly n) | nameIsKeyword n kw -> Just ()
   _ -> Nothing
 
+nameIsKeyword :: Name -> Text -> Bool
+nameIsKeyword name keyword =
+  case (Name.isRelative name, Name.reverseSegments name) of
+    (True, segment NonEmpty.:| []) -> NameSegment.toText segment == keyword
+    _ -> False
+
 -- If the hash qualified is name only, it is treated as a var, if it
 -- has a short hash, we resolve that short hash immediately and fail
 -- committed if that short hash can't be found in the current environment
@@ -960,9 +969,9 @@ bang = P.label "bang" do
 
 seqOp :: (Ord v) => P v m Pattern.SeqOp
 seqOp =
-  (Pattern.Snoc <$ matchToken (L.SymbolyId ":+" Nothing))
-    <|> (Pattern.Cons <$ matchToken (L.SymbolyId "+:" Nothing))
-    <|> (Pattern.Concat <$ matchToken (L.SymbolyId "++" Nothing))
+  Pattern.Snoc <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment ":+"))))
+    <|> Pattern.Cons <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment "+:"))))
+    <|> Pattern.Concat <$ matchToken (L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment "++"))))
 
 term4 :: (Monad m, Var v) => TermP v m
 term4 = f <$> some termLeaf
diff --git a/parser-typechecker/src/Unison/Syntax/TypeParser.hs b/parser-typechecker/src/Unison/Syntax/TypeParser.hs
index 5fc1525382..c523e8e6c4 100644
--- a/parser-typechecker/src/Unison/Syntax/TypeParser.hs
+++ b/parser-typechecker/src/Unison/Syntax/TypeParser.hs
@@ -5,6 +5,9 @@ import Data.Set qualified as Set
 import Text.Megaparsec qualified as P
 import Unison.Builtin.Decls qualified as DD
 import Unison.HashQualified qualified as HQ
+import Unison.HashQualified' qualified as HQ'
+import Unison.Name qualified as Name
+import Unison.NameSegment (NameSegment (NameSegment))
 import Unison.NamesWithHistory qualified as Names
 import Unison.Parser.Ann (Ann (..))
 import Unison.Prelude
@@ -113,6 +116,6 @@ forall :: (Var v) => TypeP v m -> TypeP v m
 forall rec = do
   kw <- reserved "forall" <|> reserved "∀"
   vars <- fmap (fmap L.payload) . some $ prefixDefinitionName
-  _ <- matchToken $ L.SymbolyId "." Nothing
+  _ <- matchToken $ L.SymbolyId (HQ'.fromName (Name.fromSegment (NameSegment ".")))
   t <- rec
   pure $ Type.foralls (ann kw <> ann t) vars t
diff --git a/unison-core/src/Unison/HashQualified'.hs b/unison-core/src/Unison/HashQualified'.hs
index a8a68c6fa9..48bacfc6d1 100644
--- a/unison-core/src/Unison/HashQualified'.hs
+++ b/unison-core/src/Unison/HashQualified'.hs
@@ -79,6 +79,11 @@ fromNamedReference n r = HashQualified n (Reference.toShortHash r)
 fromName :: n -> HashQualified n
 fromName = NameOnly
 
+fromNameHash :: n -> Maybe ShortHash -> HashQualified n
+fromNameHash name = \case
+  Nothing -> NameOnly name
+  Just hash -> HashQualified name hash
+
 matchesNamedReferent :: (Eq n) => n -> Referent -> HashQualified n -> Bool
 matchesNamedReferent n r = \case
   NameOnly n' -> n' == n
diff --git a/unison-core/src/Unison/Name.hs b/unison-core/src/Unison/Name.hs
index bb961c09e9..58a096fc61 100644
--- a/unison-core/src/Unison/Name.hs
+++ b/unison-core/src/Unison/Name.hs
@@ -5,6 +5,7 @@ module Unison.Name
 
     -- * Basic construction
     cons,
+    snoc,
     joinDot,
     fromSegment,
     fromSegments,
@@ -120,6 +121,13 @@ cons x name =
         ("cannot cons " ++ show x ++ " onto absolute name" ++ show name)
     Name Relative (y :| ys) -> Name Relative (y :| ys ++ [x])
 
+-- | Snoc a name segment onto the end of a name.
+--
+-- /O(1)/.
+snoc :: Name -> NameSegment -> Name
+snoc (Name pos (s1 :| ss)) s0 =
+  Name pos (s0 :| s1 : ss)
+
 -- | Return the number of name segments in a name.
 --
 -- /O(n)/, where /n/ is the number of name segments.
diff --git a/unison-syntax/src/Unison/Syntax/Lexer.hs b/unison-syntax/src/Unison/Syntax/Lexer.hs
index 70018c3272..9e0beb95f5 100644
--- a/unison-syntax/src/Unison/Syntax/Lexer.hs
+++ b/unison-syntax/src/Unison/Syntax/Lexer.hs
@@ -1,8 +1,4 @@
-{-# LANGUAGE BangPatterns #-}
-{-# LANGUAGE DeriveFoldable #-}
-{-# LANGUAGE DeriveTraversable #-}
 {-# LANGUAGE TemplateHaskell #-}
-{-# LANGUAGE ViewPatterns #-}
 
 module Unison.Syntax.Lexer
   ( Token (..),
@@ -11,6 +7,7 @@ module Unison.Syntax.Lexer
     Err (..),
    Pos (..),
    Lexeme (..),
+    lexemeToHQName,
    lexer,
    simpleWordyId,
    simpleSymbolyId,
@@ -52,10 +49,18 @@ import Text.Megaparsec.Char qualified as CP
 import Text.Megaparsec.Char.Lexer qualified as LP
 import Text.Megaparsec.Error qualified as EP
 import Text.Megaparsec.Internal qualified as PI
+import Unison.HashQualified qualified as HQ
+import Unison.HashQualified' qualified as HQ'
 import Unison.Lexer.Pos (Column, Line, Pos (Pos), column, line)
+import Unison.Name (Name)
+import Unison.Name qualified as Name
+import Unison.NameSegment (NameSegment (NameSegment))
+import Unison.NameSegment qualified as NameSegment
 import Unison.Prelude
 import Unison.ShortHash (ShortHash)
 import Unison.ShortHash qualified as SH
+import Unison.Syntax.HashQualified' qualified as HQ' (toString)
+import Unison.Syntax.Name qualified as Name (unsafeFromString)
 import Unison.Util.Bytes qualified as Bytes
 import Unison.Util.Monoid (intercalateMap)
@@ -129,8 +134,8 @@ data Lexeme
   | Reserved String -- reserved tokens such as `{`, `(`, `type`, `of`, etc
   | Textual String -- text literals, `"foo bar"`
   | Character Char -- character literals, `?X`
-  | WordyId String (Maybe ShortHash) -- a (non-infix) identifier
-  | SymbolyId String (Maybe ShortHash) -- an infix identifier
+  | WordyId (HQ'.HashQualified Name) -- a (non-infix) identifier
+  | SymbolyId (HQ'.HashQualified Name) -- an infix identifier
   | Blank String -- a typed hole or placeholder
   | Numeric String -- numeric literals, left unparsed
   | Bytes Bytes.Bytes -- bytes literals
@@ -142,6 +147,13 @@ type IsVirtual = Bool -- is it a virtual semi or an actual semi?
 
 makePrisms ''Lexeme
 
+lexemeToHQName :: Lexeme -> Maybe (HQ.HashQualified Name)
+lexemeToHQName = \case
+  WordyId n -> Just (HQ'.toHQ n)
+  SymbolyId n -> Just (HQ'.toHQ n)
+  Hash sh -> Just (HQ.HashOnly sh)
+  _ -> Nothing
+
 space :: P ()
 space =
   LP.space
@@ -303,7 +315,7 @@ lexer0' scope rem =
     | notLayout t1 && touches t1 t2 && isSigned num =
         t1
          : Token
-            (SymbolyId (take 1 num) Nothing)
+            (SymbolyId (HQ'.fromName (Name.unsafeFromString (take 1 num))))
            (start t2)
            (inc $ start t2)
          : Token (Numeric (drop 1 num)) (inc $ start t2) (end t2)
@@ -375,10 +387,10 @@ lexemes' eof =
               -- ability Foo where => ability Foo where
               tn <- subsequentTypeName
               pure $ case (tn, docToks) of
-                (Just (WordyId tname _), ht : _)
+                (Just (WordyId tname), ht : _)
                  | isTopLevel ->
                      startToks
-                        <> [WordyId (tname <> ".doc") Nothing <$ ht, Open "=" <$ ht]
+                        <> [WordyId (HQ'.fromName (Name.snoc (HQ'.toName tname) (NameSegment "doc"))) <$ ht, Open "=" <$ ht]
                        <> docToks0
                        <> [Close <$ last docToks]
                       <> endToks
@@ -785,8 +797,10 @@ lexemes' eof =
      pure $ (fmap Reserved <$> typ) <> t
 
    blank =
-      separated wordySep $
-        char '_' *> P.optional wordyIdSeg <&> (Blank . fromMaybe "")
+      separated wordySep do
+        _ <- char '_'
+        seg <- P.optional wordyIdSeg
+        pure (Blank (maybe "" (Text.unpack . NameSegment.toText) seg))
 
    semi = char ';' $> Semi False
    textual = Textual <$> quoted
@@ -820,39 +834,58 @@ lexemes' eof =
    wordyId :: P Lexeme
    wordyId = P.label wordyMsg . P.try $ do
      dot <- P.optional (lit ".")
-      segs <- P.sepBy1 wordyIdSeg (P.try (char '.' <* P.lookAhead (P.satisfy wordyIdChar)))
-      shorthash <- P.optional shorthash
-      pure $ WordyId (fromMaybe "" dot <> intercalate "." segs) shorthash
+      segs <- Nel.fromList <$> P.sepBy1 wordyIdSeg (P.try (char '.' <* P.lookAhead (P.satisfy wordyIdChar)))
+      hash <- P.optional shorthash
+      let name = (if isJust dot then Name.makeAbsolute else id) (Name.fromSegments segs)
+      pure (WordyId (HQ'.fromNameHash name hash))
      where
        wordyMsg = "identifier (ex: abba1, snake_case, .foo.bar#xyz, or 🌻)"
 
    symbolyId :: P Lexeme
    symbolyId = P.label symbolMsg . P.try $ do
      dot <- P.optional (lit ".")
-      segs <- P.optional segs
-      shorthash <- P.optional shorthash
+      segs <- P.optional segments
+      hash <- P.optional shorthash
      case (dot, segs) of
-        (_, Just segs) -> pure $ SymbolyId (fromMaybe "" dot <> segs) shorthash
+        (_, Just segs) -> do
+          let name = (if isJust dot then Name.makeAbsolute else id) (Name.fromSegments segs)
+          pure (SymbolyId (HQ'.fromNameHash name hash))
        -- a single . or .#somehash is parsed as a symboly id
-        (Just dot, Nothing) -> pure $ SymbolyId dot shorthash
+        (Just dot, Nothing) -> do
+          let name = Name.fromSegment (NameSegment (Text.pack dot))
+          pure (SymbolyId (HQ'.fromNameHash name hash))
        (Nothing, Nothing) -> fail symbolMsg
      where
-        segs = symbolyIdSeg <|> (wordyIdSeg <+> lit "." <+> segs)
+        segments :: P (Nel.NonEmpty NameSegment)
+        segments =
+          symbolySegments <|> wordySegments
+
+        symbolySegments :: P (Nel.NonEmpty NameSegment)
+        symbolySegments = do
+          seg <- symbolyIdSeg
+          pure (seg Nel.:| [])
+
+        wordySegments :: P (Nel.NonEmpty NameSegment)
+        wordySegments = do
+          seg0 <- wordyIdSeg
+          _ <- lit "."
+          seg1 Nel.:| segs <- segments
+          pure (seg0 Nel.:| seg1 : segs)
 
        symbolMsg = "operator (examples: +, Float./, List.++#xyz)"
 
-    symbolyIdSeg :: P String
+    symbolyIdSeg :: P NameSegment
    symbolyIdSeg = do
      start <- pos
      id <- P.takeWhile1P (Just symbolMsg) symbolyIdChar
      when (Set.member id reservedOperators) $ do
        stop <- pos
        P.customFailure (Token (ReservedSymbolyId id) start stop)
-      pure id
+      pure (NameSegment (Text.pack id))
 
-    wordyIdSeg :: P String
+    wordyIdSeg :: P NameSegment
    -- wordyIdSeg = litSeg <|> (P.try do -- todo
-    wordyIdSeg = P.try $ do
+    wordyIdSeg = P.try do
      start <- pos
      ch <- P.satisfy wordyIdStartChar
      rest <- P.many (P.satisfy wordyIdChar)
@@ -860,7 +893,7 @@ lexemes' eof =
      when (Set.member word keywords) $ do
        stop <- pos
        P.customFailure (Token (ReservedWordyId word) start stop)
-      pure (ch : rest)
+      pure (NameSegment (Text.pack (ch : rest)))
 
    {-
    -- ``an-identifier-with-dashes``
@@ -1142,11 +1175,13 @@ findClose :: [String] -> Layout -> Maybe (String, Int)
 findClose _ [] = Nothing
 findClose s ((h, _) : tl) = if h `elem` s then Just (h, 1) else fmap (1 +) <$> findClose s tl
 
-simpleWordyId :: String -> Lexeme
-simpleWordyId = flip WordyId Nothing
+simpleWordyId :: Name -> Lexeme
+simpleWordyId name =
+  WordyId (HQ'.fromName name)
 
-simpleSymbolyId :: String -> Lexeme
-simpleSymbolyId = flip SymbolyId Nothing
+simpleSymbolyId :: Name -> Lexeme
+simpleSymbolyId name =
+  SymbolyId (HQ'.fromName name)
 
 notLayout :: Token Lexeme -> Bool
 notLayout t = case payload t of
@@ -1445,8 +1480,8 @@ instance P.VisualStream [Token Lexeme] where
        case showEscapeChar c of
          Just c -> "?\\" ++ [c]
          Nothing -> '?' : [c]
-      pretty (WordyId n h) = n ++ (toList h >>= Text.unpack . SH.toText)
-      pretty (SymbolyId n h) = n ++ (toList h >>= Text.unpack . SH.toText)
+      pretty (WordyId n) = HQ'.toString n
+      pretty (SymbolyId n) = HQ'.toString n
      pretty (Blank s) = "_" ++ s
      pretty (Numeric n) = n
      pretty (Hash sh) = show sh
diff --git a/unison-syntax/src/Unison/Syntax/Parser.hs b/unison-syntax/src/Unison/Syntax/Parser.hs
index 51d42470cf..07aa1afe6b 100644
--- a/unison-syntax/src/Unison/Syntax/Parser.hs
+++ b/unison-syntax/src/Unison/Syntax/Parser.hs
@@ -44,14 +44,12 @@ module Unison.Syntax.Parser
    sepBy1,
    string,
    symbolyDefinitionName,
-    symbolyIdString,
    tok,
    tokenToPair,
    tupleOrParenthesized,
    uniqueBase32Namegen,
    uniqueName,
    wordyDefinitionName,
-    wordyIdString,
    wordyPatternName,
  )
where
@@ -74,8 +72,10 @@ import Unison.ABT qualified as ABT
 import Unison.ConstructorReference (ConstructorReference)
 import Unison.Hash qualified as Hash
 import Unison.HashQualified qualified as HQ
+import Unison.HashQualified' qualified as HQ'
 import Unison.Hashable qualified as Hashable
 import Unison.Name as Name
+import Unison.NameSegment (NameSegment (NameSegment))
 import Unison.Names (Names)
 import Unison.Names.ResolutionResult qualified as Names
 import Unison.Parser.Ann (Ann (..))
@@ -85,7 +85,7 @@ import Unison.Prelude
 import Unison.Reference (Reference)
 import Unison.Referent (Referent)
 import Unison.Syntax.Lexer qualified as L
-import Unison.Syntax.Name qualified as Name (unsafeFromString)
+import Unison.Syntax.Name qualified as Name (toVar, unsafeFromString)
 import Unison.Term (MatchCase (..))
 import Unison.UnisonFile.Error qualified as UF
 import Unison.Util.Bytes (Bytes)
@@ -274,8 +274,9 @@ matchToken x = P.satisfy ((==) x . L.payload)
 importDotId :: (Ord v) => P v m (L.Token Name)
 importDotId = queryToken go
   where
-    go (L.SymbolyId "." Nothing) = Just (Name.unsafeFromString ".")
-    go _ = Nothing
+    go = \case
+      L.SymbolyId (HQ'.NameOnly name@(Name.reverseSegments -> NameSegment "." Nel.:| [])) -> Just name
+      _ -> Nothing
 
 -- Consume a virtual semicolon
 semi :: (Ord v) => P v m (L.Token ())
@@ -288,9 +289,9 @@ semi = label "newline or semicolon" $ queryToken go
 closeBlock :: (Ord v) => P v m (L.Token ())
 closeBlock = void <$> matchToken L.Close
 
-wordyPatternName :: (Var v) => P v m (L.Token v)
+wordyPatternName :: Var v => P v m (L.Token v)
 wordyPatternName = queryToken \case
-  L.WordyId s Nothing -> Just $ Var.nameds s
+  L.WordyId (HQ'.NameOnly n) -> Just $ Name.toVar n
   _ -> Nothing
 
 -- Parse an prefix identifier e.g. Foo or (+), discarding any hash
@@ -304,44 +305,36 @@ prefixTermName :: (Var v) => P v m (L.Token v)
 prefixTermName = wordyTermName <|> parenthesize symbolyTermName
   where
    wordyTermName = queryToken \case
-      L.WordyId s Nothing -> Just $ Var.nameds s
+      L.WordyId (HQ'.NameOnly n) -> Just $ Name.toVar n
      L.Blank s -> Just $ Var.nameds ("_" <> s)
      _ -> Nothing
    symbolyTermName = queryToken \case
-      L.SymbolyId s Nothing -> Just $ Var.nameds s
+      L.SymbolyId (HQ'.NameOnly n) -> Just $ Name.toVar n
      _ -> Nothing
 
 -- Parse a wordy identifier e.g. Foo, discarding any hash
-wordyDefinitionName :: (Var v) => P v m (L.Token v)
-wordyDefinitionName = queryToken \case
-  L.WordyId s _ -> Just $ Var.nameds s
+wordyDefinitionName :: Var v => P v m (L.Token v)
+wordyDefinitionName = queryToken $ \case
+  L.WordyId n -> Just $ Name.toVar (HQ'.toName n)
   L.Blank s -> Just $ Var.nameds ("_" <> s)
   _ -> Nothing
 
--- Parse a wordyId as a String, rejecting any hash
-wordyIdString :: (Ord v) => P v m (L.Token String)
-wordyIdString = queryToken \case
-  L.WordyId s Nothing -> Just s
-  _ -> Nothing
-
 -- Parse a wordyId as a Name, rejecting any hash
-importWordyId :: (Ord v) => P v m (L.Token Name)
-importWordyId = (fmap . fmap) Name.unsafeFromString wordyIdString
+importWordyId :: Ord v => P v m (L.Token Name)
+importWordyId = queryToken \case
+  L.WordyId (HQ'.NameOnly n) -> Just n
+  _ -> Nothing
 
 -- The `+` in: use Foo.bar + as a Name
-importSymbolyId :: (Ord v) => P v m (L.Token Name)
-importSymbolyId = (fmap . fmap) Name.unsafeFromString symbolyIdString
-
--- Parse a symbolyId as a String, rejecting any hash
-symbolyIdString :: (Ord v) => P v m (L.Token String)
-symbolyIdString = queryToken \case
-  L.SymbolyId s Nothing -> Just s
+importSymbolyId :: Ord v => P v m (L.Token Name)
+importSymbolyId = queryToken \case
+  L.SymbolyId (HQ'.NameOnly n) -> Just n
   _ -> Nothing
 
--- Parse a symboly ID like >>= or Docs.&&, discarding any hash
-symbolyDefinitionName :: (Var v) => P v m (L.Token v)
-symbolyDefinitionName = queryToken \case
-  L.SymbolyId s _ -> Just $ Var.nameds s
+-- Parse a symboly ID like >>= or &&, discarding any hash
+symbolyDefinitionName :: Var v => P v m (L.Token v)
+symbolyDefinitionName = queryToken $ \case
+  L.SymbolyId n -> Just $ Name.toVar (HQ'.toName n)
   _ -> Nothing
 
 parenthesize :: (Ord v) => P v m a -> P v m a
@@ -352,21 +345,17 @@ hqPrefixId = hqWordyId_ <|> parenthesize hqSymbolyId_
 hqInfixId = hqSymbolyId_
 
 -- Parse a hash-qualified alphanumeric identifier
-hqWordyId_ :: (Ord v) => P v m (L.Token (HQ.HashQualified Name))
+hqWordyId_ :: Ord v => P v m (L.Token (HQ.HashQualified Name))
 hqWordyId_ = queryToken \case
-  L.WordyId "" (Just h) -> Just $ HQ.HashOnly h
-  L.WordyId s (Just h) -> Just $ HQ.HashQualified (Name.unsafeFromString s) h
-  L.WordyId s Nothing -> Just $ HQ.NameOnly (Name.unsafeFromString s)
+  L.WordyId n -> Just $ HQ'.toHQ n
  L.Hash h -> Just $ HQ.HashOnly h
  L.Blank s | not (null s) -> Just $ HQ.NameOnly (Name.unsafeFromString ("_" <> s))
  _ -> Nothing
 
 -- Parse a hash-qualified symboly ID like >>=#foo or &&
-hqSymbolyId_ :: (Ord v) => P v m (L.Token (HQ.HashQualified Name))
+hqSymbolyId_ :: Ord v => P v m (L.Token (HQ.HashQualified Name))
 hqSymbolyId_ = queryToken \case
-  L.SymbolyId "" (Just h) -> Just $ HQ.HashOnly h
-  L.SymbolyId s (Just h) -> Just $ HQ.HashQualified (Name.unsafeFromString s) h
-  L.SymbolyId s Nothing -> Just $ HQ.NameOnly (Name.unsafeFromString s)
+  L.SymbolyId n -> Just (HQ'.toHQ n)
  _ -> Nothing
 
 -- Parse a reserved word
diff --git a/unison-syntax/test/Main.hs b/unison-syntax/test/Main.hs
index 5393a4d033..06fea2eeef 100644
--- a/unison-syntax/test/Main.hs
+++ b/unison-syntax/test/Main.hs
@@ -6,10 +6,12 @@ import Data.Maybe (fromJust)
 import Data.Text qualified as Text
 import EasyTest
 import System.IO.CodePage (withCP65001)
+import Unison.HashQualified' qualified as HQ'
 import Unison.Prelude
 import Unison.ShortHash (ShortHash)
 import Unison.ShortHash qualified as ShortHash
 import Unison.Syntax.Lexer
+import Unison.Syntax.Name qualified as Name (unsafeFromString)
 
 main :: IO ()
 main =
@@ -87,8 +89,8 @@ test =
        ],
      t ".Foo.Bar.+" [simpleSymbolyId ".Foo.Bar.+"],
      -- idents with hashes
-      t "foo#bar" [WordyId "foo" (Just "#bar")],
-      t "+#bar" [SymbolyId "+" (Just "#bar")],
+      t "foo#bar" [WordyId (HQ'.HashQualified "foo" "#bar")],
+      t "+#bar" [SymbolyId (HQ'.HashQualified "+" "#bar")],
      -- note - these are all the same, just with different spacing
      let ex1 = "if x then y else z"
          ex2 = unlines ["if", " x", "then", " y", "else z"]
@@ -196,7 +198,7 @@ test =
          suffix <- ["0", "x", "!", "'"] -- examples of wordyIdChar
          let i = kw ++ suffix
          -- a keyword at the front of an identifier should still be an identifier
-          pure $ t i [simpleWordyId i],
+          pure $ t i [simpleWordyId (Name.unsafeFromString i)],
      -- Test string literals
      t
        "\"simple string without escape characters\""
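For orientation, a minimal sketch (not part of the patch) of how the two helpers added in unison-core, Name.snoc and HQ'.fromNameHash, are meant to compose, mirroring the lexer's doc-block and identifier changes above. The module name, example names, and language pragmas are assumptions; the repository normally enables these extensions via its cabal defaults.

{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE OverloadedStrings #-}

-- Illustrative sketch only; not part of the patch.
module NameHelperSketch where

import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name)
import Unison.Name qualified as Name
import Unison.NameSegment (NameSegment (NameSegment))
import Unison.ShortHash (ShortHash)

-- Name.snoc appends a segment to a name in O(1), e.g. Foo -> Foo.doc,
-- which is how the lexer now derives the name of an inline doc block.
docName :: Name
docName = Name.snoc (Name.fromSegment (NameSegment "Foo")) (NameSegment "doc")

-- HQ'.fromNameHash picks NameOnly or HashQualified depending on whether the
-- lexer consumed a trailing short hash, so WordyId/SymbolyId payloads can be
-- built uniformly from a name plus an optional hash.
lexedPayload :: Maybe ShortHash -> HQ'.HashQualified Name
lexedPayload = HQ'.fromNameHash docName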
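Likewise for orientation only, a hedged sketch of the recurring migration pattern for token matchers in the parser: a match on L.WordyId s Nothing becomes a match on HQ'.NameOnly, and a match on L.WordyId s _ becomes a match on the whole payload with the hash dropped via HQ'.toName. The helper names below are invented for illustration.

{-# LANGUAGE ImportQualifiedPost #-}
{-# LANGUAGE LambdaCase #-}

-- Illustrative sketch only; not part of the patch.
module LexemeMatchSketch where

import Unison.HashQualified' qualified as HQ'
import Unison.Name (Name)
import Unison.Syntax.Lexer qualified as L

-- Old shape `L.WordyId s Nothing` (identifier with no hash) becomes a match
-- on the NameOnly constructor of the hash-qualified payload.
hashFreeWordy :: L.Lexeme -> Maybe Name
hashFreeWordy = \case
  L.WordyId (HQ'.NameOnly n) -> Just n
  _ -> Nothing

-- Old shape `L.WordyId s _` (hash ignored) becomes a match on the whole
-- payload, with any hash dropped via HQ'.toName.
anyWordy :: L.Lexeme -> Maybe Name
anyWordy = \case
  L.WordyId n -> Just (HQ'.toName n)
  _ -> Nothing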