Compare commits

master...v1.0.1

No commits in common. "master" and "v1.0.1" have entirely different histories.

8 changed files with 238 additions and 579 deletions

View File

@ -5,7 +5,7 @@ name: testsuite
steps:
- name: composer install
image: chialab/php:7.3
image: chialab/php
commands:
- composer install
volumes:
@ -13,21 +13,21 @@ steps:
path: /root/.composer/cache/
- name: wait for test server
image: alpine:3.12
image: alpine
commands:
- echo "Waiting for server to launch on testserver:8080..."
- while ! nc -z testserver 8080; do sleep 0.1 ; done
- echo "Ready!"
- name: run tests
image: chialab/php:7.3
image: chialab/php
commands:
- URL=http://testserver:8080 vendor/bin/phpunit tests --testdox --color=always --no-interaction
- URL=http://testserver:8080 vendor/bin/phpunit tests
services:
- name: testserver
image: node:15.2
image: node
detach: true
commands:
- cd tests/server/

View File

@ -3,7 +3,7 @@ Similar project to [spatie/http-status-check](https://github.com/spatie/http-sta
## Install
```plain
composer config repositories.jhodges composer https://composer.jhodges.co.uk/
composer config repositories.jhodges composer https://git.jhodges.co.uk/composer
composer require jhodges/sitemap
```

View File

@ -4,9 +4,8 @@
"type": "library",
"require": {
"php": "^7.1",
"spatie/crawler": "4.6.6",
"cweagans/composer-patches": "~1.0",
"guzzlehttp/guzzle": "6.5.2"
"spatie/crawler": "^4.6",
"cweagans/composer-patches": "~1.0"
},
"require-dev": {
"phpunit/phpunit": "^8.5"
@ -28,7 +27,7 @@
"add crawled again observer": "https://patch-diff.githubusercontent.com/raw/spatie/crawler/pull/280.patch"
},
"guzzlehttp/guzzle": {
"Status code must be an integer value between 1xx and 5xx": "https://github.com/guzzle/guzzle/commit/f81cd6cdff1213f90de8f012489017510e3d6ff4.patch"
"Status code must be an integer value between 1xx and 5xx": "https://patch-diff.githubusercontent.com/raw/guzzle/guzzle/pull/2591.patch"
}
}
}

699
composer.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,17 +0,0 @@
FROM markizano/devuan:beowulf-amd64

# Install PHP CLI and the tools Composer needs. Update, install and clean
# in ONE layer: a later `RUN apt clean` (as in the original) runs in its
# own layer and cannot shrink the image, because the cached lists are
# already baked into the earlier layer.
RUN apt-get update \
    && apt-get install -y php-cli git unzip php-xml \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Install Composer via the checksum-verified installer script, then remove
# the script from the image.
COPY get_composer.sh /root/
RUN bash /root/get_composer.sh && rm /root/get_composer.sh

# WORKDIR creates /app if it does not exist — no separate `mkdir` needed.
WORKDIR /app
RUN composer init && composer config repositories.jhodges composer https://composer.jhodges.co.uk && composer require jhodges/sitemap

COPY crawl.php /app/

# Exec form so PHP is PID 1 and receives signals (e.g. SIGTERM) directly
# instead of being wrapped in a shell that swallows them.
ENTRYPOINT ["php", "crawl.php"]

View File

@ -1,45 +0,0 @@
<?php
// Crawl one or more start URLs (CRAWL_URL env var, comma-separated) and
// print a Markdown table summarising HTTP status codes, followed by a
// per-URL detail listing optionally filtered to the codes in CRAWL_CODE.
require_once(__DIR__.'/vendor/autoload.php');

use \JHodges\Sitemap\Crawler;

// Required: comma-separated list of start URLs.
if($url=getenv('CRAWL_URL')){
    $urls=array_filter(array_map('trim',explode(',',$url)));
}else{
    die("No env: CRAWL_URL\n");
}

// Optional: comma-separated status codes to include in the detail listing.
// Empty means "list every crawled URL".
if($code=getenv('CRAWL_CODE')){
    $codes=array_filter(array_map('trim',explode(',',$code)));
}else{
    $codes=[];
}

$crawler=new Crawler();
foreach($urls as $url){
    $crawler->crawl($url);
}

$summary=[];
$details='';
foreach($crawler->getResults() as $url=>$result){
    // Initialise the counter before incrementing: ++ on an undefined array
    // key raises a notice (a warning since PHP 8.0).
    if(!isset($summary[$result['code']])){
        $summary[$result['code']]=0;
    }
    $summary[$result['code']]++;
    if( count($codes)==0 || in_array($result['code'],$codes) ){
        $details.="{$result['code']} {$url}\n";
        // Distinct variable name: the original reused $url here, clobbering
        // the outer foreach key mid-iteration.
        foreach($result['foundOn'] as $foundOnUrl=>$count){
            $details.=" <- ($count) $foundOnUrl\n";
        }
    }
}

ksort($summary);
echo '|code|count|'."\n";
echo '|----|-----|'."\n";
foreach($summary as $code=>$count){
    echo "| $code | $count |\n";
}
if($details){
    echo "\n\n----\n\n```\n$details\n```\n";
}

View File

@ -1,17 +0,0 @@
#!/bin/sh
# Download the Composer installer, verify its SHA-384 against the signature
# published by the Composer project, and install Composer 1.x into
# /usr/local/bin/composer. Exits non-zero on checksum mismatch or if the
# installer itself fails.

expected="$(wget -q -O - https://composer.github.io/installer.sig)"
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
actual="$(php -r "echo hash_file('sha384', 'composer-setup.php');")"

if [ "$expected" = "$actual" ]; then
    # Checksum verified: run the installer (--1 pins the Composer 1.x line).
    php composer-setup.php --1 --filename=composer --install-dir=/usr/local/bin
    status=$?
else
    >&2 echo 'ERROR: Invalid installer checksum'
    status=1
fi

# Single cleanup path: the setup script is removed in either branch.
rm composer-setup.php
exit $status

View File

@ -9,15 +9,14 @@ use Psr\Http\Message\UriInterface;
use Spatie\Crawler\Crawler as SpatieCrawler;
use Spatie\Crawler\CrawlUrl;
use Spatie\Crawler\CrawlAllUrls;
use Spatie\Crawler\CrawlProfile;
use Spatie\Crawler\CrawlInternalUrls;
class Crawler{
private $observer;
private $crawler;
public function __construct(array $reqOps=[]){
public function __construct($reqOps=[]){
$this->crawler = SpatieCrawler::create(array_merge($reqOps, [
RequestOptions::ALLOW_REDIRECTS => [
'track_redirects' => true,
@ -26,24 +25,13 @@ class Crawler{
$this->observer = new CrawlObserver();
$this->crawler->setCrawlObserver($this->observer);
$this->crawler->setCrawlProfile(new CrawlAllUrls());
}
public function setUserAgent(String $agent) : self{
$this->crawler->setUserAgent($agent);
return $this;
}
public function setCrawlProfile(CrawlProfile $p) : self{
$this->crawler->setCrawlProfile($p);
return $this;
}
public function crawl(String $url) : void{
public function crawl($url){
$this->crawler->startCrawling($url);
}
public function getResults() : array{
public function getResults(){
return $this->observer->results;
}