Compare commits

8 Commits

| Author | SHA1 | Date |
|---|---|---|
| | b66496c640 | |
| | 1d4cb55557 | |
| | 05c993ec51 | |
| | 03ffdeb1f7 | |
| | d8e664eba4 | |
| | 6ee937aa12 | |
| | 5cf2c07416 | |
| | 107793cbf1 | |
README.md (new file, 11 lines)

@@ -0,0 +1,11 @@
# Task: "robot haskell"

## Example output:

```ShadowItem {url = Just "https://aneks.kulturaliberalna.pl/wp-content/uploads/2016/02/51%C3%94%C3%87%C3%B452-With-Watermark.pdf", title = "Aneks Nr 51\8211\&52 1988", itype = "periodical", originalDate = Just "2016", creator = Nothing, format = Just "pdf", lang = Just "pol", finalUrl = "https://aneks.kulturaliberalna.pl/wp-content/uploads/2016/02/51%C3%94%C3%87%C3%B452-With-Watermark.pdf", description = Nothing}```

## Extracting .pdf info from the page https://aneks.kulturaliberalna.pl/archiwum-aneksu/:

```haskell
extractRecords = extractLinksWithText "//a[contains(@title,'Aneks') and contains(text(),'Nr')]"
  >>> second (arr $ replace "\n" "")
  >>> first (extractLinksWithText "//div/a[contains(@href,'.pdf')]") -- fetch the page behind the URL and extract the links on it that match the XPath expression
  -- the end result should be triples: ((link, text: "Wyświetl cały numer"), magazine issue label)
```
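The extraction pipeline in the README (and again in app/aneks.hs below) is built from arrow combinators. As a plain-function analogy, here is a minimal sketch using only Control.Arrow, not the HXT arrows or the project's extractLinksWithText, and unlike HXT's list arrows each stage here yields exactly one result: `second` rewrites the right component of a pair, `first` the left one, and `>>>` chains the stages.

```haskell
import Control.Arrow (arr, first, second, (>>>))
import Data.Char (toUpper)

-- Plain-function analogue of the shape of extractRecords:
-- start from a pair, clean up the right component, then transform the left one.
demo :: (Int, String) -> (String, String)
demo = second (arr (map toUpper))   -- like `second (arr $ replace "\n" "")`
   >>> first (arr show)             -- like the `first (extractLinksWithText ...)` step

main :: IO ()
main = print (demo (51, "aneks nr 51"))   -- ("51","ANEKS NR 51")
```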
@@ -33,7 +33,7 @@ import Data.Tree.NTree.TypeDefs
 import Data.Maybe
 import Control.Monad.Trans
 import Text.XML.HXT.XPath
--- import Text.XML.HXT.Curl
+import Text.XML.HXT.Curl
 import Text.XML.HXT.HTTP
 
 import Text.Regex.TDFA
@@ -64,8 +64,8 @@ downloadDocument = readFromDocument [withParseHTML yes,
                                      withEncodingErrors no,
                                      withPreserveComment yes,
                                      withStrictInput yes,
-                                     withHTTP []
+                                     -- withHTTP []
-                                     -- withCurl [("curl--user-agent","AMU Digital Libraries Indexing Agent")]
+                                     withCurl [("curl--user-agent","AMU Digital Libraries Indexing Agent")]
                                      ]
 
 downloadDocumentWithEncoding enc = readFromDocument [withParseHTML yes,
@@ -73,13 +73,13 @@ downloadDocumentWithEncoding enc = readFromDocument [withParseHTML yes,
                                                      withEncodingErrors no,
                                                      withPreserveComment yes,
                                                      withInputEncoding enc,
-                                                     withHTTP []]
+                                                     -- withHTTP []]
-                                                     -- withCurl []]
+                                                     withCurl []]
 
 downloadXmlDocument = readFromDocument [withWarnings no,
                                         withEncodingErrors no,
-                                        withHTTP []]
+                                        -- withHTTP []]
-                                        -- withCurl [] ]
+                                        withCurl [] ]
 
 
 data ShadowLibrary = ShadowLibrary { logoUrl :: Maybe String,
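The hunks above switch the download helpers from the hxt-http backend (`withHTTP`) to the curl backend (`withCurl`), adding a custom user agent. Below is a minimal, self-contained sketch of what such a curl-backed fetch looks like, using standard hxt, hxt-curl and hxt-xpath calls; the URL and XPath expression are taken from elsewhere in this change, and the snippet is illustrative rather than part of the diff.

```haskell
import Text.XML.HXT.Core
import Text.XML.HXT.Curl (withCurl)
import Text.XML.HXT.XPath (getXPathTrees)

-- Fetch the archive page through the curl backend with a custom user agent
-- and print the href attributes of links that point at .pdf files.
main :: IO ()
main = do
  hrefs <- runX $
    readDocument [ withParseHTML yes
                 , withWarnings no
                 , withCurl [("curl--user-agent", "AMU Digital Libraries Indexing Agent")]
                 ] "https://aneks.kulturaliberalna.pl/archiwum-aneksu/"
    >>> getXPathTrees "//div/a[contains(@href,'.pdf')]"
    >>> getAttrValue "href"
  mapM_ putStrLn hrefs
```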
@@ -4,7 +4,7 @@ import ShadowLibrary.Core
 
 import Text.XML.HXT.Core
 import Text.XML.HXT.XPath
--- import Text.XML.HXT.Curl
+--import Text.XML.HXT.Curl
 import Data.List
 import Data.List.Utils (replace)
 
app/aneks.hs (new file, 46 lines)

@@ -0,0 +1,46 @@
{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
import ShadowLibrary.Core

import Text.XML.HXT.Core
import Text.XML.HXT.XPath
import Text.XML.HXT.Curl
import Data.List
import Data.List.Utils (replace)

import Text.Regex.Posix
import Text.Printf


extractRecords = extractLinksWithText "//a[contains(@title,'Aneks') and contains(text(),'Nr')]"
  >>> second (arr $ replace "\n" "")
  >>> first (extractLinksWithText "//div/a[contains(@href,'.pdf')]") -- fetch the page behind the URL and extract the links on it that match the XPath expression
  -- the end result should be triples: ((link, text: "Wyświetl cały numer"), magazine issue label)


-- ... and here those triples are converted into the target ShadowItem structure
toShadowItem :: ((String, String), String) -> ShadowItem
toShadowItem ((url, blank), yearlyTitle) =
  (defaultShadowItem url title) {
    originalDate = Just date,
    itype = "periodical",
    format = Just "pdf",
    finalUrl = url
    }
  where title = "Aneks " ++ yearlyTitle
        date = getDate url

getDate url =
  case url =~~ "/(19[0-9][0-9]|20[0-9][0-9])/" :: Maybe [[String]] of
    Just [[_, year]] -> year
    otherwise -> error $ "unexpected url: " ++ url


main = do
  let start = "https://aneks.kulturaliberalna.pl/archiwum-aneksu/"
  let shadowLibrary = ShadowLibrary {logoUrl=Nothing,
                                     lname="Archiwum Aneksu",
                                     abbrev="ArchAnek",
                                     lLevel=0,
                                     webpage=start}
  extractItemsStartingFromUrl shadowLibrary start (extractRecords >>> arr toShadowItem)
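As a quick standalone check of the year extraction in `getDate` above (a usage sketch, not part of the diff; the URL is the example from the README):

```haskell
import Text.Regex.Posix ((=~~))

-- Same regex and pattern match as getDate in app/aneks.hs above.
getDate :: String -> String
getDate url =
  case url =~~ "/(19[0-9][0-9]|20[0-9][0-9])/" :: Maybe [[String]] of
    Just [[_, year]] -> year
    _                -> error $ "unexpected url: " ++ url

main :: IO ()
main = putStrLn $ getDate
  "https://aneks.kulturaliberalna.pl/wp-content/uploads/2016/02/51%C3%94%C3%87%C3%B452-With-Watermark.pdf"
-- prints "2016", matching originalDate in the example ShadowItem
```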
@@ -20,6 +20,7 @@ library
                , HTTP
                , hxt
                , hxt-http
+               , hxt-curl
                , hxt-xpath
                , MissingH
                , monad-logger
@@ -54,12 +55,27 @@ executable almanachmuszyny
   build-depends: base
                , hxt
                , hxt-xpath
+               , hxt-curl
+               , MissingH
+               , regex-posix
+               , shadow-library
+  default-language: Haskell2010
+
+executable aneks
+  hs-source-dirs: app
+  main-is: aneks.hs
+  ghc-options: -threaded -rtsopts -with-rtsopts=-N
+  build-depends: base
+               , hxt
+               , hxt-xpath
+               , hxt-curl
                , MissingH
                , regex-posix
                , shadow-library
   default-language: Haskell2010
+
 
 
 source-repository head
   type: git
   location: https://github.com/name/project
stack.yaml.lock (new file, 19 lines)

@@ -0,0 +1,19 @@
# This file was autogenerated by Stack.
# You should not edit this file by hand.
# For more information, please see the documentation at:
# https://docs.haskellstack.org/en/stable/lock_files

packages:
- completed:
    hackage: hxt-xpath-9.1.2.2@sha256:9cd590ae93a04573db8f90fa4094625ebd97dded45da7667c577ce6b38a42900,1999
    pantry-tree:
      size: 2225
      sha256: aee2f75974e868ff429b8ff349a29667536c60397098f5dfedc968d1951511bb
  original:
    hackage: hxt-xpath-9.1.2.2
snapshots:
- completed:
    size: 507596
    url: https://raw.githubusercontent.com/commercialhaskell/stackage-snapshots/master/lts/11/9.yaml
    sha256: 42f472dbf06482da1b3319241f3e3b3593a45bd7d4f537d2789f21386b9b2ad3
  original: lts-11.9
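The lock file's `original:` entries indicate that the project's stack.yaml (not shown in this compare view) pins the `lts-11.9` resolver and lists `hxt-xpath-9.1.2.2` as an extra dependency; the `completed:` sections record the exact Hackage revision and snapshot hashes that Stack resolved them to.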