init: fix broken verify_interval

The operands seem to have been inverted, which in turn
created a situation whereby the interval was always
changed to match the min_bs

Fixes: https://github.com/axboe/fio/issues/522
Signed-off-by: Damian Yurzola <damian@yurzola.net>
diff --git a/init.c b/init.c
index ae3c4f7..8861c99 100644
--- a/init.c
+++ b/init.c
@@ -802,13 +802,14 @@
 			o->verify_interval = o->min_bs[DDIR_READ];
 
 		/*
-		 * Verify interval must be a factor or both min and max
+		 * Verify interval must be a factor of both min and max
 		 * write size
 		 */
-		if (o->verify_interval % o->min_bs[DDIR_WRITE] ||
-		    o->verify_interval % o->max_bs[DDIR_WRITE])
+		if (!o->verify_interval ||
+				o->min_bs[DDIR_WRITE] % o->verify_interval ||
+				o->max_bs[DDIR_WRITE] % o->verify_interval)
 			o->verify_interval = gcd(o->min_bs[DDIR_WRITE],
-							o->max_bs[DDIR_WRITE]);
+						 o->max_bs[DDIR_WRITE]);
 	}
 
 	if (o->pre_read) {
@@ -1585,7 +1586,7 @@
 			p.avg_msec = min(o->log_avg_msec, o->bw_avg_time);
 		else
 			o->bw_avg_time = p.avg_msec;
-	
+
 		p.hist_msec = o->log_hist_msec;
 		p.hist_coarseness = o->log_hist_coarseness;
 
@@ -1616,7 +1617,7 @@
 			p.avg_msec = min(o->log_avg_msec, o->iops_avg_time);
 		else
 			o->iops_avg_time = p.avg_msec;
-	
+
 		p.hist_msec = o->log_hist_msec;
 		p.hist_coarseness = o->log_hist_coarseness;