Aelve Codesearch

grep over package repositories
Index updated about a month ago

total matches: 713

atom-conduit-0.6.0.0
1 match
test/Main.hs
  return $ testGroup "Atom golden tests" $ do
    xmlFile <- xmlFiles
    let goldenFile = addExtension xmlFile ".golden"
        f file = fmap (Lazy.encodeUtf8 . pShowNoColor) $ runResourceT $ runConduit $ sourceFile file .| Conduit.decodeUtf8 .| XML.parseText' def .| XML.force "Invalid <feed>" atomFeed
    return $ goldenVsString xmlFile goldenFile $ f xmlFile

properties :: TestTree
properties = testGroup "Properties"
  [ roundtripProperty "AtomText" (renderAtomText "test") (atomText "test")

            
bdcs-0.6.0
13 matches
src/BDCS/Import/NPM.hs
    homepage :: Maybe T.Text,
    license :: T.Text,

    -- This can show up in package.json in two ways: either as the map of executable
    -- names to js paths ("bin" : { "exec1": "./script1.js", "exec2": "./script2.js" }),
    -- or as a single string ("bin": "./script1.js"). The single string case should be
    -- interpreted as the path to an executable that should be named the same as the name
    -- of the package.
    bin :: Maybe [(T.Text, T.Text)],
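
As an aside, the two "bin" shapes that comment describes can be collapsed into the [(name, path)] form of the field. The BinField type and normaliseBin below are hypothetical illustrations, not bdcs code:

import qualified Data.Text as T

-- Hypothetical helper (not bdcs code): the two shapes "bin" can take in package.json.
data BinField = BinMap [(T.Text, T.Text)]   -- "bin": { "exec1": "./script1.js", ... }
              | BinSingle T.Text            -- "bin": "./script1.js"

-- The single-string case names the executable after the package itself.
normaliseBin :: T.Text -> BinField -> [(T.Text, T.Text)]
normaliseBin _       (BinMap entries) = entries
normaliseBin pkgName (BinSingle path) = [(pkgName, path)]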

            
src/BDCS/Import/NPM.hs
    -- we only want the bin symlinks for the top-level ones.
    -- If there is an explicit bin list, that takes precedence; otherwise use directories.bin.
    case (bin, binDirectory) of
        (Just binlist, _)      -> mapM_ (addBin sourceId) binlist
        (Nothing, Just binDir) -> addBinDir sourceId binDir
        _                      -> return ()

    -- similar thing for man pages
    case (man, manDirectory) of

            
src/BDCS/Import/NPM.hs

    -- similar thing for man pages
    case (man, manDirectory) of
        (Just manList, _)      -> mapM_ (addMan sourceId) manList
        (Nothing, Just manDir) -> addManDir sourceId manDir
        _                      -> return ()

    -- save the requirements as build key/vals. These are the semver requirement ranges.
    -- When the source is "linked" into a build, and from there to an exportable group, the semvers

            
src/BDCS/Import/NPM.hs
    -- save the requirements as build key/vals. These are the semver requirement ranges.
    -- When the source is "linked" into a build, and from there to an exportable group, the semvers
    -- will be translated to exact-version requirements and stored in the requirements table.
    mapM_ (\(reqname, reqver) -> insertSourceKeyValue (TextKey "dependency") reqname (Just reqver) sourceId) $
        fromMaybe [] dependencies

    -- mark the source as coming from npm
    -- TODO figure out a better way to express this kind of thing
    void $ insertSourceKeyValue (TextKey "npm") "" Nothing sourceId

            
src/BDCS/Import/NPM.hs
        binPrefix = normalise (T.unpack binDir)

        -- find paths where the directory component is the same as the prefix
        binFiles = filter (\p -> takeDirectory p == binPrefix) $ map (makeRelative "/" . T.unpack . filesPath) files
     in
        mapM_ (\p -> insertSourceKeyValue (TextKey "bin") (T.pack $ takeFileName p) (Just (T.pack p)) sourceId) binFiles

    addMan :: MonadIO m => Key Sources -> T.Text -> SqlPersistT m ()
    addMan sourceId manName = void $ insertSourceKeyValue (TextKey "man") (normaliseText manName) Nothing sourceId

    -- Unlike directories.bin, we do need to recurse into this directory

            
src/BDCS/Import/NPM.hs
    addManDir :: MonadIO m => Key Sources -> T.Text -> SqlPersistT m ()
    addManDir sourceId manDir = let
        manPrefix = normalise (T.unpack manDir)
        paths = map (makeRelative "/" . T.unpack . filesPath) files
        manFiles = filter (\p -> (manPrefix `isPrefixOf` p) && (p =~ ("\\.[0-9]$" :: String))) paths
     in
        mapM_ (\p -> insertSourceKeyValue (TextKey "man") (T.pack p) Nothing sourceId) manFiles
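
For example, with manDir set to "man", a path such as "man/npm.1" passes the filter above (the prefix matches and the name ends in ".1"), while "man/readme.md" does not.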

-- | Fetch an NPM from a given 'URI' and load it into the MDDB.  This function must be
-- run within the 'ReaderT' monad, which should be given an 'ImportState' record.  This
-- is how importing knows where to store the results.  Errors will be printed to the
-- screen.

            
src/BDCS/Import/NPM.hs
    -- TODO handle TarExceptions
    tarEntryToFile :: (MonadError String m, MonadThrow m, MonadResource m) => ContentStore -> Conduit CT.TarChunk m Files
    tarEntryToFile cs =
        -- Run the tar processing in a state with a map from FilePath to (ObjectDigest, CT.Size),
        -- so hardlinks can get the data they need from earlier entries.
        evalStateLC HM.empty $ CT.withEntries handleEntry
     where
        handleEntry :: (MonadState (HM.HashMap FilePath (ObjectDigest, CT.Size)) m, MonadError String m, MonadResource m) => CT.Header -> Conduit BS.ByteString m Files
        handleEntry header@CT.Header{..} = do

            
src/BDCS/Import/RPM.hs
import           Control.Monad.Trans.Resource(MonadResource, MonadThrow)
import qualified Data.ByteString.Char8 as C8
import           Data.CPIO(Entry(..))
import           Data.Conduit((.|), Conduit, Consumer, ZipConduit(..), await, awaitForever, mapOutput, runConduit, runConduitRes, transPipe, yield)
import           Data.Conduit.Combinators(sinkList)
import qualified Data.Conduit.List as CL
import           Data.ContentStore(ContentStore, CsError(..), runCsMonad, storeLazyByteStringC)
import           Data.ContentStore.Digest(ObjectDigest)
import           Database.Esqueleto

            
src/BDCS/Import/RPM.hs
import BDCS.RPM.Sources(mkSource)
import BDCS.Signatures(insertBuildSignatures)
import BDCS.Sources(insertSource)
import BDCS.Utils.Error(mapError)

{-# ANN buildImported ("HLint: ignore Use ." :: String) #-}

buildImported :: MonadResource m => [Tag] ->  SqlPersistT m Bool
buildImported sigs =

            
src/BDCS/Import/RPM.hs
    -- The first conduit just extracts filenames out of each cpio entry.  cpio puts a leading . on
    -- each filename, but the RPM headers do not have that.  Thus, we trim it out so the paths look
    -- the same.
        filenames = CL.map (T.dropWhile (== '.') . decodeUtf8 . cpioFileName) .| sinkList
    -- The second conduit extracts each file from a cpio entry, stores it in the content store,
    -- and returns its digest.
        digests = maybeStore .| sinkList

    -- And then those two conduits run in parallel and the results are packaged up together so

            
src/BDCS/Import/RPM.hs
        -- Checking the type is more complicated than you'd think it should be, because
        -- the type mode is more than just one bit. e.g., regular == 100000, symlink == 120000
        if fromIntegral cpioMode `intersectFileModes` fileTypeModes == regularFileMode then
            mapOutput Just $ yield cpioFileData .| storeLazyByteStringC repo
        else
            yield Nothing
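
For context, that test works because the file type occupies several of the high bits of the mode word, so a single-bit check cannot distinguish the types. A minimal illustrative sketch using the unix package (not BDCS code):

import System.Posix.Files (fileTypeModes, intersectFileModes, regularFileMode)
import System.Posix.Types (FileMode)

-- Illustrative only: mask out everything but the file-type bits, then compare
-- against the specific type constant. Regular files (0o100000) and symlinks
-- (0o120000) share the 0o100000 bit, so testing a single bit is not enough.
isRegular :: FileMode -> Bool
isRegular m = m `intersectFileModes` fileTypeModes == regularFileMode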

-- | Load the headers from a parsed RPM into the MDDB.  The return value is whether or not an import
-- occurred.  This is not the same as success vs. failure.  If the package already exists in the

            
src/BDCS/Import/RPM.hs
    files     <- mkFiles tagHeaders checksums
    filesIds  <- insertFiles files

    -- Pair up files with their IDs in the Files table.  Then use this mapping to add all the
    -- various file-based labels to the KeyVal table.
    void $ apply (zip files filesIds)

    void $ associateFilesWithBuild filesIds buildId
    void $ associateFilesWithPackage filesIds pkgNameId

            
src/BDCS/Import/RPM.hs

    result <- runExceptT $ runConduitRes $
           getFromURI uri
        .| transPipe (mapError showParseError) parseRPMC
        .| transPipe (mapError showCsError) (consume repo db)

    case result of
        Left e     -> liftIO $ putStrLn e
        Right True -> liftIO $ putStrLn $ "Imported " ++ uriPath uri
        _          -> return ()

            
binary-ext-2.0.4
6 matches
src/Data/Conduit/Parsers/Text/Parser.hs
{-# INLINE castParser #-}

voidError :: Monad m => GetT s i o e m a -> GetT s i o () m a
voidError = mapError (const ())
{-# INLINE voidError #-}

anyError :: Monad m => GetT s i o e' m a -> GetT s i o e m a
anyError = mapError (const $ error "Data.Conduit.Parsers.Text.Parser.anyError")
{-# INLINE anyError #-}

skipCharIs :: Char -> Parser () ()
skipCharIs = void . pCharIs
{-# INLINE skipCharIs #-}

            
src/Data/Conduit/Parsers.hs
  -> GetT s (DecodingToken s) o (Either (Maybe Word64) e) m a
isolate !n !g = do
  !o1 <- elemsRead
  !r <- getC $ flip runStateC $ runExceptC $ fuseLeftovers id (go 0) (exceptC $ stateC $ flip runGetC $ mapError Right g)
  !o2 <- elemsRead
  if o2 - o1 < n
    then throwError $ Left $ Just $ o2 - o1
    else return r
  where

            
src/Data/Conduit/Parsers.hs
{-# INLINE endOfInput #-}

matchP :: (DecodingState s, Monoid (DecodingToken s), Monad m) => GetT s (DecodingToken s) o e m a -> GetT s (DecodingToken s) o e m (DecodingToken s, a)
matchP !p = (\(!t, !r) -> (foldl (flip mappend) mempty t, r)) <$> mapError snd (trackP p)
{-# INLINE matchP #-}

            
test/Data/Conduit/Parsers/Binary/Get/Spec.hs

get1 :: (DefaultDecodingState s, Monad m) => GetT s S.ByteString Word16 Bool m ()
get1 = do
  yield =<< mapError (const False) getWord16le
  yield =<< mapError (const False) getWord16le
  yield =<< mapError (const False) getWord16be
  ensureEof True

get2 :: Get () Word64
get2 = do
  skip 3

            
test/Data/Conduit/Parsers/Binary/Get/Spec.hs
  assertEqual "" 8 c

recordBody :: Get () [Word64]
recordBody = whileM (not <$> N.nullE) $ mapError (const ()) $ isolate 8 getWord64le

record :: Word64 -> Get (Either (Maybe Word64) ()) [Word64]
record z = isolate z recordBody

records :: (DefaultDecodingState s, Monad m) => GetT s S.ByteString [Word64] (Either (Maybe Word64) ()) m ()

            
test/Data/Conduit/Parsers/Binary/Get/Spec.hs
  let
    !g = do
      catchError i $ const $ return ()
      !r <- mapError Right $ getByteString 2
      ensureEof $ Right ()
      return r
  let (!e, !c) = runIdentity $ yield "ABCD" `connect` runGet g
  assertEqual "" (Right "CD") e
  assertEqual "" 4 c

            
codec-rpm-0.2.2
2 matches
examples/unrpm.hs
              getRPM path
           .| parseRPMC
           .| payloadContentsC
           .| DCC.mapM_ (liftIO . writeCpioEntry)
    either print return result

main :: IO ()
main = do
    -- Read the list of rpms to process from the command line arguments

            
examples/unrpm.hs
        putStrLn "Usage: unrpm RPM [RPM ...]"
        exitFailure

    mapM_ processRPM argv

            
conduit-algorithms-0.0.10.1
8 matches
Data/Conduit/Algorithms/Async.hs



-- | This is like 'Data.Conduit.List.map', except that each element is processed
-- in a separate thread (up to 'maxThreads' can be queued up at any one time).
-- Results are evaluated to normal form (not weak-head normal form; i.e., the
-- structure is deeply evaluated) to ensure that the computation is fully
-- evaluated in the worker thread.
--

            
Data/Conduit/Algorithms/Async.hs
-- it is sometimes better to do
--
-- @
--    CC.conduitVector 4096 .| asyncMapC (V.map f) .| CC.concat
-- @
--
-- where @CC@ refers to 'Data.Conduit.Combinators'
--
-- See 'unorderedAsyncMapC'
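
A hedged usage sketch of asyncMapC, assuming (per the Haddocks above) that the thread limit is its first argument; the pipeline itself is made up for illustration:

import           Data.Conduit ((.|))
import qualified Data.Conduit as C
import qualified Data.Conduit.Combinators as CC
import           Data.Conduit.Algorithms.Async (asyncMapC)

-- Hypothetical: square the inputs in up to 8 worker threads, then sum the results.
sumSquares :: IO Int
sumSquares = C.runConduit $
    CC.yieldMany [1 .. 1000 :: Int]
        .| asyncMapC 8 (\x -> x * x)
        .| CC.sum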

            
Data/Conduit/Algorithms/Async.hs
                    C.runConduit $
                        C.sourceHandle h
                            .| transform
                            .| CL.map Just
                            .| CA.sinkTBQueue q
                    liftIO $ atomically (TQ.writeTBQueue q Nothing)
    CA.gatherFrom 8 prod .| untilNothing

genericAsyncTo :: forall m. (MonadIO m, MonadUnliftIO m) => C.ConduitT B.ByteString B.ByteString (R.ResourceT IO) () -> Handle -> C.ConduitT B.ByteString C.Void m ()

            
Data/Conduit/Algorithms/Async.hs
    let drain q = liftIO . C.runConduitRes $
                CA.sourceTBQueue q
                    .| untilNothing
                    .| CL.map (B.concat . reverse)
                    .| tranform
                    .| C.sinkHandle h
    bsConcatTo ((2 :: Int) ^ (15 :: Int))
        .| CA.drainTo 8 drain


            
Data/Conduit/Algorithms/Async.hs
                    C.runConduit $
                        C.sourceHandle h
                            .| CZ.multiple CZ.ungzip
                            .| CL.map Just
                            .| CA.sinkTBQueue q
                    atomically (TQ.writeTBQueue q Nothing)
    (CA.gatherFrom 8 prod .| untilNothing)
        `C.catchC`
        (\(e :: SZ.ZlibException) -> liftIO . ioError $ mkIOError userErrorType ("Error reading gzip file: "++displayException e) (Just h) Nothing)

            
Data/Conduit/Algorithms/Storable.hs
--
-- See 'readStorableV'
writeStorableV :: forall m a. (MonadIO m, Monad m, Storable a) => C.ConduitT (VS.Vector a) B.ByteString m ()
writeStorableV = CL.mapM (liftIO. encodeStorable')
    where
        encodeStorable' :: Storable a => VS.Vector a -> IO B.ByteString
        encodeStorable' v' = VS.unsafeWith v' $ \p ->
                                    B.packCStringLen (castPtr p, VS.length v' * (sizeOf (undefined :: a)))
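
A sketch of how writeStorableV might be used to dump a storable vector to a file; the file sink and the ResourceT wrapper are assumptions for illustration, not taken from the library's examples:

import           Data.Conduit ((.|))
import qualified Data.Conduit as C
import qualified Data.Conduit.Combinators as CC
import qualified Data.Vector.Storable as VS
import           Data.Conduit.Algorithms.Storable (writeStorableV)

-- Hypothetical: serialise one vector of Doubles to disk as raw bytes.
writeDoubles :: FilePath -> VS.Vector Double -> IO ()
writeDoubles path v = C.runConduitRes $
    C.yield v .| writeStorableV .| CC.sinkFile path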


            
tests/Tests.hs
    where
        expected = sort (concat [i1, i2, i3])
        mYield lst = do
            let lst' = map return lst
            forM_ lst' $ \elemnt -> do
                elemnt' <- elemnt
                C.yield elemnt'
        i1 = [ 0, 2, 4 :: Int]
        i2 = [ 1, 3, 4, 5]

            
tests/Tests.hs
    let testdata = [0 :: Int .. 12]
    C.runConduitRes $
        CC.yieldMany testdata
            .| CL.map (B8.pack . (\n -> show n ++ "\n"))
            .| CAlg.asyncGzipToFile testingFileNameGZ
    C.runConduitRes $
        CAlg.asyncGzipFromFile testingFileNameGZ
        .| CAlg.asyncGzipToFile testingFileNameGZ2
    shouldProduceIO testdata $