Compare commits


No commits in common. "master" and "master" have entirely different histories.

4 changed files with 10 additions and 83 deletions

README.md

@@ -1,7 +0,0 @@
# Build and run project
`stack install`
`stack build`
`stack exec czasopisma`

ShadowLibrary/Core.hs

@@ -33,7 +33,7 @@ import Data.Tree.NTree.TypeDefs
import Data.Maybe
import Control.Monad.Trans
import Text.XML.HXT.XPath
-import Text.XML.HXT.Curl
+-- import Text.XML.HXT.Curl
import Text.XML.HXT.HTTP
import Text.Regex.TDFA
@@ -64,7 +64,8 @@ downloadDocument = readFromDocument [withParseHTML yes,
      withEncodingErrors no,
      withPreserveComment yes,
      withStrictInput yes,
-     withCurl [("curl--user-agent","AMU Digital Libraries Indexing Agent")]
+     withHTTP []
+     -- withCurl [("curl--user-agent","AMU Digital Libraries Indexing Agent")]
      ]
downloadDocumentWithEncoding enc = readFromDocument [withParseHTML yes,
@@ -72,12 +73,13 @@ downloadDocumentWithEncoding enc = readFromDocument [withParseHTML yes,
      withEncodingErrors no,
      withPreserveComment yes,
      withInputEncoding enc,
-     withCurl []]
+     withHTTP []]
+     -- withCurl []]
downloadXmlDocument = readFromDocument [withWarnings no,
      withEncodingErrors no,
-     -- withHTTP []]
-     withCurl [] ]
+     withHTTP []]
+     -- withCurl [] ]
data ShadowLibrary = ShadowLibrary { logoUrl :: Maybe String,
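As context for the withCurl-to-withHTTP switch above (not part of the diff): a minimal sketch, assuming only the hxt and hxt-http packages, of how an arrow built with readFromDocument and withHTTP [] is typically driven. The URL and the title extraction are illustrative, not taken from the repository.

```haskell
import Text.XML.HXT.Core
import Text.XML.HXT.HTTP (withHTTP)

-- Fetch a page over plain HTTP (no libcurl dependency) and list the text
-- of its <title> elements, using the same SysConfig style as Core.hs.
fetchTitles :: String -> IO [String]
fetchTitles url = runX $
  constA url
  >>> readFromDocument [ withParseHTML yes
                       , withWarnings no
                       , withEncodingErrors no
                       , withHTTP [] ]
  >>> deep (hasName "title")
  >>> getChildren
  >>> getText

main :: IO ()
main = fetchTitles "http://example.com/" >>= mapM_ putStrLn
```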

app/czasopisma.hs

@@ -1,54 +0,0 @@
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
import ShadowLibrary.Core

import Text.XML.HXT.Core
import Text.XML.HXT.XPath
-- import Text.XML.HXT.Curl
import Data.List
import Data.List.Utils (replace)
import Text.Regex.Posix
import Text.Printf

extractRecords = extractLinksWithText "" -- (URL, title) pairs
  >>> first (extractLinksWithText "//a[@target='_blank'][contains(@href,'.pdf')]") -- fetch the page at each URL and extract the links on it that match the XPath expression
  -- the final result is triples ((article URL, article title), yearly volume title)

toShadowItem :: ((String, String), String) -> ShadowItem
toShadowItem ((url, articleTitle), yearlyTitle) =
  (defaultShadowItem url title) {
    originalDate = Just date,
    itype = "periodical",
    format = Just "pdf",
    finalUrl = url
    }
  where homeTitle = getTitle url
        title = replace "_" " " homeTitle ++ "ROK " ++ date ++ " " ++ articleTitle
        date = getDate url

getDate url =
  case url =~~ "_(19[0-9][0-9]|20[0-9][0-9])" :: Maybe [[String]] of
    Just [[_, year]] -> year
    otherwise -> error $ "unexpected url: " ++ url

getTitle url =
  case url =~~ "/([A-Za-z]+_|[A-Za-z]+_[A-Za-z]+_|[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_|[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_|[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_[A-Za-z]+_)" :: Maybe [[String]] of
    Just [[_, title]] -> title
    otherwise -> error $ "unexpected url2: " ++ url

main = do
  let start = "https://www.czasopisma.centralnabibliotekapttk.pl"
  let shadowLibrary = ShadowLibrary {logoUrl=Nothing,
                                     lname="Czasopisma PTTK",
                                     abbrev="czasopisma",
                                     lLevel=0,
                                     webpage=start}
  putStrLn "Program started"
  extractItemsStartingFromUrl shadowLibrary start (extractRecords >>> arr toShadowItem)
  putStrLn "Page successfully crawled"

twilight-library.cabal

@@ -5,8 +5,8 @@ description: Please see README.md
homepage: http://github.com/name/project
license: Proprietary
license-file: LICENSE
-author: Szymon Parafiński
-maintainer: szypar@st.amu.edu.pl
+author: Your name here
+maintainer: your.address@example.com
-- copyright:
category: Web
build-type: Simple
@@ -20,7 +20,6 @@ library
                     , HTTP
                     , hxt
                     , hxt-http
-                    , hxt-curl
                     , hxt-xpath
                     , MissingH
                     , monad-logger
@@ -55,25 +54,12 @@ executable almanachmuszyny
  build-depends:       base
                     , hxt
                     , hxt-xpath
-                    , hxt-curl
                     , MissingH
                     , regex-posix
                     , shadow-library
  default-language:    Haskell2010

-executable czasopisma
-  hs-source-dirs:      app
-  main-is:             czasopisma.hs
-  ghc-options:         -threaded -rtsopts -with-rtsopts=-N
-  build-depends:       base
-                     , hxt
-                     , hxt-xpath
-                     , hxt-curl
-                     , MissingH
-                     , regex-posix
-                     , shadow-library
-  default-language:    Haskell2010

source-repository head
  type:     git
-  location: https://git.wmi.amu.edu.pl/s444018/twilight-library
+  location: https://github.com/name/project