From b7fa4b3c65e39a024e472fb02bdc56d48a27035f Mon Sep 17 00:00:00 2001
From: Kr1ss
Date: Thu, 5 Dec 2019 22:09:28 +0100
Subject: adopt package & update: wapiti 3.0.2-1

update to upstream release 3.0.2; adopting the package (previous maintainer kept as Contributor)
---
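Reviewer notes (not part of the commit message): upstream names its
tarball wapiti3-$pkgver.tar.gz, hence the ${pkgver:0:1} substring
expansion in source=(). A quick way to sanity-check the update locally
could look like the sketch below (assuming makepkg and the optional
namcap linter are available; the built package file name depends on
your PKGEXT settings):

    # ${pkgver:0:1} expands to the first character of "3.0.2",
    # matching upstream's "wapiti3" tarball prefix
    pkgver=3.0.2; echo "wapiti${pkgver:0:1}-${pkgver}.tar.gz"

    makepkg --verifysource  # fetch the tarball and check sha256sums
    makepkg -sri            # sync deps, build, remove make deps, install
    namcap PKGBUILD wapiti-3.0.2-1-any.pkg.tar.xz  # optional lint
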
 .SRCINFO  |  15 +-
 ChangeLog | 486 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 PKGBUILD  |  41 ++++--
 3 files changed, 526 insertions(+), 16 deletions(-)
 create mode 100644 ChangeLog

diff --git a/.SRCINFO b/.SRCINFO
index 0a6b7c5..ba75a99 100644
--- a/.SRCINFO
+++ b/.SRCINFO
@@ -1,12 +1,12 @@
 pkgbase = wapiti
-	pkgdesc = A vulnerability scanner for web applications. It currently search vulnerabilities like XSS, SQL and XPath injections, file inclusions, command execution, LDAP injections, CRLF injections...
-	pkgver = 3.0.1
+	pkgdesc = A comprehensive web app vulnerability scanner written in Python
+	pkgver = 3.0.2
 	pkgrel = 1
 	url = http://wapiti.sourceforge.net/
+	changelog = ChangeLog
 	arch = any
 	license = GPL
-	depends = python
-	depends = python-setuptools
+	makedepends = python-setuptools
 	depends = python-requests
 	depends = python-beautifulsoup4
 	depends = python-lxml
@@ -14,8 +14,11 @@ pkgbase = wapiti
 	depends = python-yaswfp
 	depends = python-mako
 	depends = python-pysocks
-	source = http://downloads.sourceforge.net/sourceforge/wapiti/wapiti/wapiti-3.0.1/wapiti3-3.0.1.tar.gz
-	sha256sums = bbb8c8f572afe77319734489a6ca0b211df4b87ad294db79b8bf0bda1c5aff29
+	optdepends = python-requests-kerberos: Kerberos authentication
+	optdepends = python-requests-ntlm: NTLM authentication
+	options = zipman
+	source = http://downloads.sourceforge.net/sourceforge/wapiti/wapiti/wapiti-3.0.2/wapiti3-3.0.2.tar.gz
+	sha256sums = df86cab9f66c7794cab54fede16029056a764f5da565b2695524f9bd2bc9a384
 
 pkgname = wapiti
 
diff --git a/ChangeLog b/ChangeLog
new file mode 100644
index 0000000..25d0b58
--- /dev/null
+++ b/ChangeLog
@@ -0,0 +1,486 @@
+02/09/2019
+    Wapiti 3.0.2
+    New XXE module can send payloads in parameters, query string, file uploads and raw body.
+    New module for detecting Open Redirect vulnerabilities (header based, HTML meta based or JS based).
+    Fixed domain scope scanning.
+    Reduced false positives in attack modules (especially time-based ones).
+    Reduced invalid links generated by JS analysis and ignore obviously malformed HTML links.
+    Do not crawl CSS files and remove query strings from JS files when crawling.
+    Improved and changed existing payloads.
+    Improved extraction of forms from HTML pages (radio buttons / select, ...)
+    Support for more POST enctypes (sending XML or JSON for example, currently only leveraged by mod_xxe)
+    --store-session option allows specifying a path where .db and .pkl files are stored.
+    --endpoint --internal-endpoint --external-endpoint options to set your own endpoint and receive requests from the target
+    Authentication options can now be used with wapiti-getcookie.
+    JS parser can now deal with HTML comments.
+    More comprehensive choices when doing Ctrl+C during scan (e.g. 'c' to continue, 'q' to quit)
+    Fixed lots of bugs thanks to received crash dumps.
+
+11/05/2018
+    Wapiti 3.0.1
+    New module mod_methods to detect interesting methods which might be allowed by scripts (PUT, PROPFIND, etc)
+    New module mod_ssrf to detect Server Side Request Forgery vulnerabilities (requires Internet access)
+    Improved mod_xss and mod_permanentxss modules to reduce false positives.
+    Changed some XSS payloads for something more visual (banner at the top of the webpage).
+    Changed bug reporting URL.
+    Fixed issue #54 in lamejs JS parser.
+    Removed lxml and libxml2 as dependencies. That parser has difficulties parsing exotic encodings.
+
+03/01/2018
+    Release of Wapiti 3.0.0
+
+02/01/2018
+    Added --list-modules and --resume-crawl options.
+
+23/12/2017
+    Ported to Python3.
+    Persister rewritten to use sqlite3 databases (for session management).
+    Added ascii-art because you know... it's an attack tool so it's a required feature.
+    Changed output format (stdout) to something more like sqlmap output.
+    python-lxml and libxml2 are required dependencies unless you opt-out with --with-html5lib at setup.
+    SOCKS5 proxy support is back.
+    New mandatory -u option must be used to specify the base URL.
+    Added -d (--depth) option to limit the maximum depth of links following.
+    Added -H (--header) option to add HTTP headers to every request.
+    Added -A (--user-agent) option to set the User-Agent string.
+    Added --skip option to skip parameters during attacks.
+    Added -S (--scan-force) option to control the amount of requests sent for attacks.
+    Added --max-parameters to not attack URLs and forms having more than X input parameters.
+    Added -l (--level) option to allow attacking query strings without parameters.
+    Added --max-scan-time option to stop the scan after the given amount of minutes.
+    Added a buster module for directory and file busting.
+    Added a Shellshock detection module.
+    Added a builtin list of well-known parameters to skip during attacks.
+    More control on execution flow when KeyboardInterrupt is triggered.
+    Reduced false-positive situations on time-based attacks (mainly blind_sql)
+    Replaced getopt with argparse.
+    Fixed bugs related to obtaining user's locale (issue #20).
+    Enhancement to support new CVE notation (issue #37).
+    Can now report minor issues (notices) besides anomalies and vulnerabilities.
+    Added mod_delay module to report time consuming webpages.
+    Renamed some options (should be easier to remember).
+    More exec, file, xss payloads.
+    Fixed a bug with JSON cookie management for IPv6 addresses and custom ports.
+    XSS attack module can escape HTML comments for payload generation.
+    Fixed -r issue on URLs having only one parameter.
+    No SSL/TLS check by default (--verify-ssl behavior).
+    Added a Mutator class for easy payload injection in parameters.
+    Rewrote report generators, added Mako as a dependency for HTML reports. Less JS.
+    Crash reports are sent to a website, opt-out with --no-bugreport.
+    Improvements on backup, sql and exec modules submitted by Milan Bartos.
+    Payload files can now include special flags that will be interpreted by Wapiti.
+    wapiti-cookie and wapiti-getcookie were merged in a new wapiti-getcookie tool.
+
+
+20/10/2013
+	Version 2.3.0
+	Fixed a colosseum of bugs, especially related to unicode.
+	Software is much more stable.
+	New report template for HTML (using Kube CSS).
+	Using v2.1.5 of Nikto database for mod_nikto.
+	Replaced httplib2 with (python-)requests for everything related to HTTP.
+	Removed BeautifulSoup from the package. It is still required however.
+	Core rewrite (PEP8 + more Pythonic)
+	New payloads for the backup, XSS, blind SQL, exec and file modules + more
+	detection rules.
+	So many improvements on lswww (crawler) that I can't make a list here. But
+	Wapiti reached 48% on Wivet.
+	Wapiti cookie format is now based on JSON.
+	Removed SOCKS proxy support (you will have to use an HTTP-to-SOCKS proxy).
+	Added an HTTPResource class for easier module creation.
+	Code restructuring for better setup.
+	Attack of parameters in query string even for HTTP POST requests.
+	Attack on file uploads (injection in file names).
+	Simpler (and less buggy) colored output with -c.
+	A CURL PoC is given for each vulnerability/anomaly found + raw HTTP
+	request representation in reports.
+	No more parameter reordering + can handle parameters repetition.
+	Added a JSON report generator + fixed the HTML report generator.
+	Added an option to not check SSL certificates.
+	mod_xss : noscript tag escaping.
+	Can work on parameters that don't have a value in query string.
+	mod_crlf is not activated by default anymore (must call it with -m).
+	Starting URLs (-s) will be fetched even if out of scope.
+	Proxy support for wapiti-getcookie and wapiti-cookie.
+	Attempt to bring an OpenVAS report generator.
+	Added a home-made SWF parser to extract URLs from flash files.
+	Added a home-made (and more than basic) JS interpreter based on the
+	pynarcissus parser. Lot of work still needs to be done on this.
+	New logo and webpage at wapiti.sf.net.
+	Added German and Malaysian translations.
+	Added a script to create standalone archive for Windows (with py2exe).
+
+29/12/2009
+    Version 2.2.1 (already)
+	Bugfixes only
+	Fixed a bug in lswww if the root URL is not given in full.
+	Fixed a bug in lswww with a call to BeautifulSoup made on non-text files.
+	Fixed a bug that occurred when verbosity = 2. Unicode error on stderr.
+	Check the document's content-type and extension before attacking files on
+	the query string.
+	Added a timeout check in the nikto module when downloading the database.
+
+28/12/2009
+	Version 2.2.0
+	Added a manpage.
+	Internationalization: translations of Wapiti in Spanish and French.
+	Options -k and -i allow the scan to be saved and restored later.
+	Added option -b to set the scope of the scan based on the root url given.
+	Wrote a library to handle cookies and save them in XML format.
+	Modules are now loaded dynamically with a dependency system.
+	Rewrote the -m option used to activate / deactivate attack modules.
+	New module to search for backup files of scripts on the target webserver.
+	New module to search for weakly configured .htaccess.
+	New module to search for dangerous files based on the Nikto database.
+	Differentiate "raw" XSS from "urlencoded" XSS.
+	Updated BeautifulSoup to version 3.0.8.
+	Better encoding support for webpages (convert to Unicode)
+	Added "resource consumption" as a vulnerability type.
+	Fixed bug ID 2779441 "Python Version 2.5 required?"
+	Fixed bug with special characters in HTML reports.
+
+05/04/2008
+	Added more patterns for file handling vulnerabilities in PHP.
+	Added GET_SQL and POST_SQL as modules (-m) for attacks.
+	Modified getcookie.py and cookie.py so they try to get the cookies
+	even if cookielib fails.
+
+27/03/2007
+	Updated ChangeLogs
+
+26/03/2009
+	Fixed bug ID 2433127. Comparison was made with HTTP error codes
+	on numeric values but httplib2 returns the status code as a string.
+	Forbid httplib2 to handle HTTP redirections. Wapiti and lswww will
+	take care of this (more checks on urls...)
+	Fixed a bug with Blind SQL attacks (the same attack could be launched
+	several times)
+	Fixed an error in blindSQLPayloads.txt.
+	Changed the error message when Wapiti doesn't get any data from lswww.
+	Verifications to be sure blind SQL attacks won't be launched if "standard"
+	SQL attacks work.
+
+25/03/2009
+	Exported blind SQL payloads from the code. Now in config file
+	blindSQLPayloads.txt.
+	Set timeout for time-based BSQL attacks to the timeout used for HTTP
+	requests + 1 second.
+	Added Blind SQL as a type of vulnerability in the report generator.
+	More verbosity for permanent XSS scan.
+	More docstrings.
+	Updated the README.
+
+24/03/2009
+	Added some docstrings to the code.
+	Removed warning on alpha code.
+	First Blind SQL Injection implementation in Wapiti.
+	Fixed some timeout errors.
+
+22/03/2009
+	Fixed character encoding error in sql injection module.
+	Changed the md5 and sha1 import in httplib2 to hashlib.
+
+28/11/2008
+	Google Charts API is added to generate the charts of the reports.
+
+15/11/2008
+	Re-integration of standard HTTP proxies in httplib2.
+	Integration of HTTP CONNECT tunneling in Wapiti.
+	Fixed bug ID 2257654 "getcookie.py error missing action in html form"
+
+02/11/2008
+	Integrated the proxy implementation of httplib2 in Wapiti.
+	Can now use SOCKSv5 and SOCKSv4 proxies.
+
+22/10/2008
+	Fixed a bug with Cookie headers.
+
+19/10/2008
+	Replaced urllib2 with httplib2.
+	Wapiti now uses persistent HTTP connections, speeding up the scan.
+	Included a python SOCKS library.
+
+09/10/2008
+	Version 2.0.0-beta
+	Added the possibility to generate reports of the vulnerabilities found
+	in HTML, XML or plain-text format. See options -o and -f.
+	HTTP authentication now works.
+	Added the option -n (or --nice) to prevent endless loops during scanning.
+	More patterns for SQL vulnerability detection
+	Code refactoring: clearer and more object-oriented
+	New XSS function is now fully implemented
+	The payloads have been separated from the code into configuration files.
+	Updated BeautifulSoup
+
+15/09/2008
+	Version 1.1.7-alpha
+	Use GET method if not specified in "method" tag
+	Keep a history of XSS payloads
+	New XSS engine for GET method using a list of payloads to bypass filters
+	New module HTTP.py for HTTP requests
+	Added fpassthru to file handling warnings
+	Added a new detection string for MS-SQL, submitted by Joe McCray
+
+28/01/2007
+	Version 1.1.6
+	New version of lswww
+
+24/10/2006
+	Version 1.1.5
+	Wildcard exclusion with -x (--exclude) option
+
+22/10/2006
+	Fixed a typo in wapiti.py (setAuthCreddentials : one 'd' is enough)
+	Fixed a bug with set_auth_credentials.
+
+07/10/2006
+	Version 1.1.4
+	Some modifications have been made on getcookie.py so it can work
+	on Webmin (and probably more web applications)
+	Added -t (--timeout) option to set the timeout in seconds
+	Added -v (--verbose) option to set the verbosity. Three available
+	modes:
+	0: only print found vulnerabilities
+	1: print currently attacked URLs (existing URLs)
+	2: print every attack payload and URL (lots of information... good
+	for debugging)
+	Wapiti is much more modular and comes with some functions to set scan
+	and attack options... look at the code ;)
+	Some default options are available as "modules" with option -m
+	(--module):
+	GET_XSS: only scan for XSS with HTTP GET method (no post)
+	POST_XSS: XSS attacks using POST and not GET
+	GET_ALL: every attack without POST requests
+	
+12/08/2006
+	Version 1.1.3
+	Fixed the timeout bug with chunked responses
+	(ID = 1536565 on SourceForge)
+
+09/08/2006
+	Version 1.1.2
+	Fixed a bug with HTTP 500 and POST attacks
+
+05/08/2006
+	Version 1.1.1
+	Fixed the UnboundLocalError due to socket timeouts
+	(bug ID = 1534415 on SourceForge)
+
+27/07/2006
+	Version 1.1.0 with urllib2
+	Detection string for mysql_error()
+	Changed the mysql payload (see http://shiflett.org/archive/184 )
+	Modification of the README file
+
+22/07/2006
+	Added CRLF Injection.
+
+20/07/2006
+	Added LDAP Injection and Command Execution (eval, system, passthru...)
+
+11/07/2006
+	-r (--remove) option to remove parameters from URLs
+	Support for Basic HTTP Auth added but doesn't work with Python 2.4.
+	Proxy support.
+	Now use cookie files (option "-c file" or "--cookie file")
+	-u (--underline) option to highlight vulnerable parameter in URL
+	Detect more vulnerabilities.
+
+04/07/2006:
+	Now attacks scripts using QUERY_STRING as a parameter
+	(i.e. http://server/script?attackme)
+
+23/06/2006:
+	Version 1.0.1
+	Can now use cookies !! (use -c var=data or --cookie var=data)
+	Two utilities added : getcookie.py (interactive) and cookie.py (command line) to get a cookie.
+	Now on Sourceforge
+
+25/04/2006:
+	Version 1.0.0
+03/01/2018
+    Release of Wapiti 3.0.0
+
+23/12/2017
+    lswww is now renamed to Crawler.
+    All HTML parsing is now done with BeautifulSoup. lxml is the default parsing engine but it's possible to opt out at
+    setup with --html5lib.
+    Analysis of JS in event handlers (onblur, onclick, etc)
+    Changed behavior of 'page' scope, added 'url' scope.
+    Default mime type used for upload fields is image/gif.
+    Added yaswfp as a dependency for SWF parsing.
+    Custom HTTP error codes check.
+    Fixed a bug with 'button' input types.
+    Updated pynarcissus with a python3 version for js parsing.
+    Rewrote "in scope" check.
+
+29/12/2009
+    Version 2.3.1
+    Fixed a bug in lswww if the root URL is not given in full.
+    Fixed a bug in lswww with a call to BeautifulSoup made on non-text files.
+    Fixed a bug that occurred when verbosity = 2. Unicode error on stderr.
+
+27/12/2009
+	Version 2.3.0
+	Internationalization and translation to English and Spanish when called from
+	Wapiti.
+	Ability to save a scan session and restore it later (-i)
+	Added option -b to set the scope of the scan based on the root url given as
+	argument.
+	Fixed bug ID 2779441 "Python Version 2.5 required?"
+	Use a home-made cookie library instead of urllib2's one.
+	Keep additional information on the webpages (headers + encoding)
+	Use BeautifulSoup to detect webpage encoding and handle parsing errors.
+	Fixed a bug when "a href" or "form action" have an empty string as value.
+	Better support of Unicode.
+
+26/03/2009
+	Version 2.2.0
+	Fixed bug ID 2433127 with HTTP 404 error codes.
+	Don't let httplib2 manage HTTP redirections: return the status code
+	and let lswww handle the new URL.
+
+25/03/2009
+	Version 2.1.9
+	Added option -e (or --export)
+	Saves URLs and form data to an XML file.
+	We hope other fuzzers will allow importing this file.
+
+24/03/2009
+	More verifications on timeout errors.
+
+22/03/2009
+	Version 2.1.8
+	Fixed bug ID: 2415094
+	Check on protocol found in hyperlinks was case-sensitive.
+	Made it case-insensitive.
+	Integration of a second linkParser class called linkParser2 from
+	lswwwv2.py. This parser uses only regexps to extract links and forms.
+
+25/11/2008
+	httplib2 uses lowercase names for the HTTP headers, as opposed to
+	urllib2 (first letter was uppercase).
+	Changed the verifications on headers.
+
+15/11/2008
+	Fixed a bug with links going to the parent directory.
+
+02/11/2008
+	Better integration of proxy support provided by httplib2.
+	It's now possible to use SOCKS proxies.
+
+19/10/2008
+	Version 2.1.7
+	Now uses httplib2 (http://code.google.com/p/httplib2/), MIT licence,
+	instead of urllib2.
+	The ability to use persistent connections makes the scan faster.
+
+09/10/2008
+	Version 2.1.6
+	HTTP authentication now works
+	Added the option -n (or --nice) to prevent endless loops during scanning
+
+28/01/2007
+	Version 2.1.5
+	First take a look at the Content-Type instead of the document extension
+	Added BeautifulSoup as an optional module to correct bad HTML documents
+	(better use tidy if you can)
+
+24/10/2006
+	Version 2.1.4
+	Wildcard exclusion with -x (--exclude) option
+
+22/10/2006
+	Fixed an error with URL parameter handling that appeared in the
+	previous version.
+	Fixed a typo in lswww.py (setAuthCreddentials : one 'd' is enough)
+
+07/10/2006
+	Version 2.1.3
+	Three verbose modes with the -v (--verbose) option
+	0: print only results
+	1: print dots for each page accessed (default mode)
+	2: print each found URL during scan
+	Timeout in seconds can be set with -t (--timeout) option
+	Fixed bug "crash when no content-type is returned"
+	Fixed an error with 404 webpages
+	Fixed a bug when the only parameter of a URL is a forbidden one
+
+09/08/2006
+	Version 2.1.2
+	Fixed a bug with regular expressions
+
+05/08/2006
+	Version 2.1.1
+	Remove redundant slashes from urls
+	(e.g. http://server/dir//page.php converted to
+	http://server/dir/page.php)
+
+20/07/2006
+	Version 2.1.0 with urllib2
+
+11/07/2006
+	-r (--remove) option to remove parameters from URLs
+	Generate URL with GET forms instead of using POST by default
+	Support for Basic HTTP Auth added but doesn't work with Python 2.4.
+	Now use cookie files (option "-c file" or "--cookie file")
+	Extracts links from Location header fields
+	
+
+06/07/2006
+	Extract links from "Location:" headers (HTTP 301 and 302)
+	Default type for "input" elements is set to "text"
+	(as written in the HTML 4.0 specifications)
+	Added "search" in input types (created for Safari browsers)
+
+04/07/2006
+	Fixed a bug with empty parameters tuples
+	(convert http://server/page?&a=2 to http://server/page?a=2)
+
+23/06/2006
+	Version 2.0.1
+	Take care of the "submit" type
+	No extra data sent when a page contains several forms
+	Corrected a bug with URLs ending with '?'
+	Support Cookies !!
+
+25/04/2006
+	Version 2.0
+	Forms are extracted as a list of tuples, each one containing
+	a string (URL of the target script) and a dict mapping the
+	names of the fields to their default values (or 'true'
+	if empty)
+	Lists the scripts that handle uploads
+	Can now be used as a module
+
+19/04/2006
+	Version 1.1
+	Case-insensitive parsing of tags
+	Handle Ctrl+C to cleanly interrupt the program
+	Extract URLs from form tags (action attribute)
+
+12/10/2005
+	Version 1.0
+	Handle links that are syntactically valid but point
+	to non-existent resources (404)
+
+11/09/2005
+	Beta4
+	Use the getopt module to easily specify the URLs to
+	visit first, the URLs to exclude (new!) or the proxy
+	to use
+
+24/08/2005
+	Beta3
+	Added a timeout when reading pages so as not to block
+	on a buggy script
+
+23/08/2005
+	Version beta2
+	Support for indexes generated by Apache
+	Filter on protocols
+	Handle links that go up the directory tree
+	Handle empty links
+
+02/08/2005
+	Beta1 released
diff --git a/PKGBUILD b/PKGBUILD
index 6004eda..79b378f 100644
--- a/PKGBUILD
+++ b/PKGBUILD
@@ -1,18 +1,39 @@
-# Maintainer: mickael9 <mickael9 at gmail dot com>
+# Maintainer: Kr1ss $(echo \<kr1ss+x-yandex+com\>|sed s/\+/./g\;s/\-/@/)
+# Contributor: mickael9 <mickael9 at gmail dot com>
+
 
 pkgname=wapiti
-pkgver=3.0.1
+
+pkgver=3.0.2
 pkgrel=1
-pkgdesc="A vulnerability scanner for web applications. It currently search vulnerabilities like XSS, SQL and XPath injections, file inclusions, command execution, LDAP injections, CRLF injections..."
+
+pkgdesc='A comprehensive web app vulnerability scanner written in Python'
+arch=('any')
 url='http://wapiti.sourceforge.net/'
-license=(GPL)
-depends=(python python-setuptools python-requests python-beautifulsoup4 python-lxml python-tld python-yaswfp python-mako python-pysocks)
-arch=(any)
+license=('GPL')
+
+depends=('python-requests' 'python-beautifulsoup4' 'python-lxml' 'python-tld'
+         'python-yaswfp' 'python-mako' 'python-pysocks')
+optdepends=('python-requests-kerberos: Kerberos authentication'
+            'python-requests-ntlm: NTLM authentication')
+makedepends=('python-setuptools')
+
+options=('zipman')
 
-source=("http://downloads.sourceforge.net/sourceforge/${pkgname}/${pkgname}/${pkgname}-${pkgver}/${pkgname}${pkgver:0:1}-${pkgver}.tar.gz")
-sha256sums=('bbb8c8f572afe77319734489a6ca0b211df4b87ad294db79b8bf0bda1c5aff29')
+changelog=ChangeLog
+source=("http://downloads.sourceforge.net/sourceforge/$pkgname/$pkgname/$pkgname-$pkgver/$pkgname${pkgver:0:1}-$pkgver.tar.gz")
+sha256sums=('df86cab9f66c7794cab54fede16029056a764f5da565b2695524f9bd2bc9a384')
+
+
+build() {
+    cd "$pkgname${pkgver:0:1}-$pkgver"
+    python setup.py build
+}
 
 package() {
-    cd "${srcdir}/${pkgname}${pkgver:0:1}-${pkgver}"
-    python setup.py install --root="${pkgdir}/" --optimize=1
+    cd "$pkgname${pkgver:0:1}-$pkgver"
+    python setup.py install --root="$pkgdir" --optimize=1 --skip-build
 }
+
+
+# vim: ts=2 sw=2 et ft=PKGBUILD:
-- 
cgit v1.2.3-70-g09d2