Rick

Rick
Rick

Friday, May 7, 2010

This script has evolved quite a bit since I changed it to work with our new REST API.
It is a handy little script for functionally testing REST JSON endpoints. It stores the results as text files in SVN for later comparison with new test runs (and with runs against other server instances).



#!/usr/bin/env groovy
// Functional test driver for REST JSON endpoints: posts JSON payloads from ./input,
// compares server responses against reference files in ./compare, and tallies results.
verbose = false //when true, print extra diagnostics (binding flag; set via -v)
execute = true //when true, actually send requests (binding flag; distinct from the execute() method)
test = true //when true, validate responses against the reference files
autogen = false //when true, write missing reference files from server responses (-g)
count = 0 //total number of tests
passed = 0 //number of tests that passed
generated = 0 //number of reference comparison files auto-generated (see autogen())
jsonExceptions = 0 //number of JSON exceptions
nullResults = 0 //number of tests that unexpectedly returned a null result
xmlFaults=0 //number of XMLFault responses (always errors)
ioExceptions=0 //number of tests that failed to connect at all
host=null //optional overrides (-o/-p/-a) applied to the URL found in each test file
port=null
webapp=null
cwd = new File(".") //current working directory
inputDir = new File(cwd, "input") //Read test files from the inputDir


// Comma-separated list of test file names whose null results are acceptable.
testsAllowNull = new File(cwd, "conf/expectNulls.conf").text.split(",") as List

println "Tests that allow nulls $testsAllowNull"


// Parse command-line options; stop cleanly on bad options or -h/--help.
if (!processArgs(args)) {
System.exit(0)
}

/**
 * Parses the command-line options into the script-level bindings
 * (host, port, webapp, inputDir, verbose, autogen).
 *
 * @param args the raw command-line arguments
 * @return false when the script should stop (bad options, or help requested),
 *         true when the tests should run
 */
def processArgs(args) {
    def cli = new CliBuilder(usage: "testScript.groovy -[hoagpv]")

    cli.with {
        h longOpt: "help", "Show usage information"
        o longOpt: "host", args: 1, "Set host name"
        g longOpt: "autogen", "turn on autogen"
        a longOpt: "webapp", args: 1, "set the webapplication URI, i.e., palmcs or ac"
        p longOpt: "port", args: 1, "Set the port for the URL"
        v longOpt: "verbose", "Verbose mode"
        i longOpt: "inputDir", args: 1, "Specify the input dir"
    }

    options = cli.parse(args)
    // CliBuilder already printed the parse error and usage; just tell the caller
    // to stop instead of exiting from inside a method.
    if (!options) return false

    if (options.o) host = options.o
    if (options.p) port = options.p
    if (options.a) webapp = options.a
    if (options.i) inputDir = new File(options.i)
    verbose = options.v
    autogen = options.g

    if (verbose) {
        println """
host $host
port $port
webapp URI $webapp
autogen $autogen
verbose $verbose
input dir $inputDir
"""
    }

    if (options.h) {
        cli.usage()
        if (verbose) {
            println """
The testScript.groovy is a test script that tests JSON payloads against JSON endpoints.
It essentially sends a JSON payload (from the input dir) and then receives a JSON payload.
Then it compares the JSON payload received with a corresponding reference file (should match).
It has some nice features that dump everything out to the file system (in formatted JSON) so you can use
Beyond Compare and/or Araxis to see the exact difference between the actual and the expected.

To get this message run:
./testScript.groovy -v -h

To run the script in verbose mode:
./testScript.groovy -v

To run the script against a different host, port and webapp URI:
./testScript.groovy -onewhost -p9999 -anewwebapp

The above would run against
http://newhost:9999/newwebapp/services/deviceJ/addUserRating

You can specify host or (host and port) or (host and port and webapp).

To run the script against just a different host (same port, same webapp URI):

./testScript.groovy -onewhost

The above would run against
http://newhost:8101/ac/services/deviceJ/addUserRating

CWD is the current working directory.

Input files are searched for in CWD/input and have the following format:

http://localhost:8101/ac/services/deviceJ/appDeleted
{ "InAppDeleted" : {
"accountTokenInfo":{"token":"assds","deviceId":"asd","email":"test@a.com","carrier":""},
"publicApplicationId":"com.palm.app.ace",
"version" : "1.0"
}
}

The first line is the URL (which can be changed via -o, -p and -a options).
The rest of the file is the JSON payload.

The files to compare the JSON payload from the server against are under CWD/compare.

If the file comparison fails, the payload from the server is stored in CWD/failed_compare so you can use
Beyond Compare or Araxis Merge or their ilk to see the differences easily.

When you first run the tests against a new server, you may need/want to create some reference JSON payloads to compare.
You can do this with the autogenerate option as follows:

./testScript.groovy -olocalhost -p8101 -apalmcsext -g

The above would create a new set of comparison JSON payloads for host localhost, port 8101,
and webapp name palmcsext.
It is up to you to look at the file contents and see if they are correct.
Once you get a set of correct files you can use them for integration testing.

"""
        } else {
            println "To see example usage run -h with -v (help and verbose)"
        }
        return false
    }

    return true

}



/* Run every test script ending in .tst found in the input directory. */
inputDir.eachFileMatch(~/^.*\.tst$/, this.&processFile)

/* Rewrites the URL taken from a test file, substituting the -o/-p/-a overrides
 * (script-level host/port/webapp bindings) while keeping the original path and query.
 */
def processURL(url) {
url = url.toURL()
boolean urlEndsWithSlash=url.path.endsWith("/")

if (host || port || webapp) {
if (host && !port && !webapp) {
// Host override only: keep the file's port, path and query.
url = "http://${host}:${url.port}${url.path}${urlEndsWithSlash?"/":""}${url.query?"?${url.query}":""}"
} else if (host && port && !webapp) {
// Host and port overridden; path and query unchanged.
url = "http://${host}:${port}${url.path}${urlEndsWithSlash?"/":""}${url.query?"?${url.query}":""}"
} else if (host && port && webapp) {
// Replace the first path segment (the webapp name) and keep the rest of the path.
restOfPathAfterWebApp = (url.path.split("/") as List)[2..-1].join("/")
url = "http://${host}:${port}/${webapp}/${restOfPathAfterWebApp}${urlEndsWithSlash?"/":""}${url.query?"?${url.query}":""}"
}
}

// NOTE(review): when the path already ends with "/", the rebuilt strings above keep that
// slash AND append another, and the return below appends a third — confirm the endpoints
// tolerate the duplicate slashes.
// NOTE(review): if no overrides are given, url is still a java.net.URL here, and neither
// `url + "/"` nor toURL() is defined on it — this path appears to assume -o/-p/-a is
// always supplied; verify.
println "URL WE ARE GOING TO RUN ${url}"
return (urlEndsWithSlash ? url + "/" : url).toURL()
}

/* Process the input file. */
/* Processes one test file: line 1 is the URL; optional "HEADER# ", "METHOD# " and
 * "FORMAT# " directive lines may follow; the remainder is the JSON payload.
 * The assignments below are deliberate script-binding (global) writes — in particular
 * `url` is read later by testUsingCMPFromCommandLine().
 */
def processFile(File testFile) {
println "Running test $testFile.name"
lines = testFile.readLines()
body = new StringBuilder(512)
url = ""
method = ""
headers = [:]
format = true

lines.eachWithIndex{line, index ->
if (index==0) {
url = line
} else {

if (line.startsWith("HEADER# ")) {
// e.g. "HEADER# X-Token:=abc" -> request header X-Token: abc
nv = line.split("# ")[1].split(":=")
headers[nv[0]]=nv[1]
} else if (line.startsWith("METHOD# ")) {
method = line.split("# ")[1]
} else if (line.startsWith("FORMAT# ")) {
// "FORMAT# off" disables the newline re-formatting of the response in executeMethod()
sFormat = line.split("# ")[1]
if (sFormat.equalsIgnoreCase("off")) {
format = false
}
}
// NOTE(review): directive lines fall through and are appended to the payload body
// too — confirm the endpoints ignore them, or skip appending them here.
body.append(line)
body.append("\n")
}
}
body = body.toString()

if (verbose) println "$url \n $body"
url = processURL(url)

/* Create the input file name which is like ./compare/localhost_8101_ac/getAppDetail.tst.compare. */
compareDir = new File(cwd, "compare")
compareDir = new File(compareDir, "${url.host}_${url.port}_${url.path.split('/')[1]}_for_${inputDir.name}")
compareDir.mkdirs()
compareFile = new File(compareDir,
"${testFile.name}.compare")

if (verbose) println "Compare file $compareFile"

// The `execute` binding flag gates the call to the execute() method.
if (execute) {
execute(testFile, url, body, compareFile, method, headers, format)
}
}


/** Executes a JSON test based on a URL and a body. */
/**
 * Runs one JSON test end to end: sends the payload to the URL, then (optionally)
 * validates the response and (optionally) auto-generates the reference file.
 */
def execute(testFile, url, body, compareFile, method, headers, format) {
    // Fire the request; the response report (or null on connect failure) goes
    // into the `results` binding, as before.
    results = executeMethod(testFile, url, body, method, headers, format)

    if (verbose) println "RESULTS:\n${results}"

    // The `test` flag lets verbose mode preview what would run without validating.
    if (test) test(testFile, compareFile, results)

    autogen(compareFile, results)
}


/** Automatically generates the output to compare with from the results from the server. */
/** When autogen mode is on, records the server response as a new reference file. */
def autogen(compareFile, results) {
    if (verbose) println "--- AUTOGEN is $autogen $compareFile ${results!=null}--- "
    // Guard clauses: never clobber an existing reference file, never record an empty response.
    if (!autogen || compareFile.exists() || !results) return
    compareFile.write(results)
    generated++
}

/** Run the actual test/validation/assertions against the results. */
/** Runs the validation/assertions against the results of one test. */
def test(testFile, compareFile, results) {
    count++ // one more test executed
    allowNull = testFile.name in testsAllowNull

    if (!results && !allowNull) {
        // A null result is an error unless this test is whitelisted in expectNulls.conf.
        nullResults++
        println "The test $testFile.name returned null"
    } else if (results && results.contains("JSONException")) {
        // A JSON exception may not be an error; we need to improve this to check
        // against a list of tests we expect JSON exceptions from.
        jsonExceptions++
        println "The test $testFile.name threw a JSONException"
    } else if (results && results.contains("XMLFault")) {
        // An XML fault is always an error.
        xmlFaults++
        println "The test $testFile.name threw an XMLFault"
    } else if (results && compareFile.exists()) {
        // Normalize all whitespace runs to single spaces, then compare with the reference.
        if (compareFile.text.split().join(' ').equals(results.split().join(' '))) {
            passed++
        } else {
            // For really big payloads Java string comparison fails, so fall back to
            // running cmp from the command line (which also records the failed output).
            testUsingCMPFromCommandLine(compareFile, results)
        }
    } else if (!results && allowNull) {
        passed++
    }
}

/**
 * Writes the server results to CWD/failed_compare and compares them with the
 * reference file using the command-line `cmp` tool. Increments `passed` when the
 * files are identical. Relies on the script-level `url` binding set by processFile().
 *
 * @return true when the files match (or results were null), false on a cmp mismatch
 */
def testUsingCMPFromCommandLine(compareFile, results) {
    failedDir = new File(cwd, "failed_compare")
    failedDir = new File(failedDir, "${url.host}_${url.port}_${url.path.split('/')[1]}_for_${inputDir.name}")
    failedDir.mkdirs()

    failedCompareFile = new File(failedDir, compareFile.name)

    // Build the command as a list so paths containing spaces stay single arguments.
    compareCommand = ["cmp", compareFile.path, failedCompareFile.path]

    if (results) {
        failedCompareFile.write(results)
        cmpProcess = compareCommand.execute()
        pText = cmpProcess.text // drain stdout before waitFor() so the pipe can't fill and block
        if (verbose) println pText
        cmpProcess.waitFor()
        if (cmpProcess.exitValue() == 0) {
            passed++
        } else {
            println "The test failed cmp compare ${compareCommand.join(' ')}"
            return false
        }
    }

    return true
}

/**
 * Sends the payload to the URL with the given method and headers, and returns the
 * response wrapped in a report string (#RESPONSE CODE / #RESPONSE MESSAGE / headers /
 * body / END). Returns null when the connection fails with an IOException.
 */
def executeMethod(testFile, url, body, method, headers, format) {

    if (verbose) {
        println """
headers = $headers
method = $method
format = $format
"""
    }
    httpConnection = url.openConnection()
    httpConnection.connectTimeout = 180000
    // Only set the method when a METHOD# directive was present: setRequestMethod("")
    // throws a ProtocolException, and this line sits outside the try block below.
    // With doOutput=true and no explicit method, HttpURLConnection POSTs.
    if (method) httpConnection.requestMethod = method
    httpConnection.doOutput = true
    httpConnection.doInput = true
    httpConnection.useCaches = false
    httpConnection.allowUserInteraction = false
    // NOTE(review): followRedirects maps to the static HttpURLConnection setter, so
    // this disables redirects JVM-wide rather than per-connection; probably fine for
    // a test script, but instanceFollowRedirects is the per-connection knob.
    httpConnection.followRedirects = false
    httpConnection.setRequestProperty("Content-type", "application/json;charset=UTF-8")
    if (headers) {
        headers.each { key, value ->
            if (verbose) println "Setting Header $key $value"
            httpConnection.setRequestProperty(key, value)
        }
    }
    if (method == "POST" || method == "PUT") {
        httpConnection.outputStream.write(body.getBytes("UTF-8"))
        httpConnection.outputStream.flush()
        httpConnection.outputStream.close()
    }
    def json = null
    def responseHeaders = []
    def responseCode = -1
    def responseMessage = "nothing"
    try {

        responseCode = httpConnection.responseCode
        responseMessage = httpConnection.responseMessage
        if (verbose) println "response code $httpConnection.responseCode"
        if (responseCode == 200) {
            // Read as UTF-8 to match the declared charset instead of the platform default.
            json = httpConnection.inputStream.readLines("UTF-8").join("\n")
        } else {
            if (httpConnection.errorStream) {
                json = httpConnection.errorStream.readLines("UTF-8").join("\n")
            }
        }
        // Collect all response headers (index 0 is the status line, whose key is null),
        // skipping Date so reference files stay stable between runs. The original loop
        // stopped after 11 headers; iterate until the fields run out instead.
        int index = 0
        while (true) {
            value = httpConnection.getHeaderField(index)
            if (!value) break
            if (httpConnection.getHeaderFieldKey(index) != "Date") {
                responseHeaders.add("HEADER: ${httpConnection.getHeaderFieldKey(index)}:=${value}")
            }
            index++
        }

    } catch (IOException iex) {
        println "$testFile.name was unable to connect: $iex.message"
        ioExceptions++
        return null
    } catch (Exception ex) {
        ex.printStackTrace()
        println "Unexpected error"
    }

    /* Add new line characters after every curly bracket '}' and comma ','. */
    if (json && format) {
        json = json.replace("}", "}\n").replace(",", ",\n")
    }
    return """
#RESPONSE CODE: $responseCode
#RESPONSE MESSAGE: $responseMessage
#HEADERS START:
${responseHeaders.join('\n')}
#HEADERS END:
$json
END
"""
}

/* Final summary of the whole run. */
print """Test count = ${count}, passed = ${passed}, generated = ${generated}, IO error = $ioExceptions
JSON Exceptions = ${jsonExceptions}, null results = ${nullResults}, XML Faults=${xmlFaults}
"""



No comments:

Post a Comment

Kafka and Cassandra support, training for AWS EC2 Cassandra 3.0 Training