diff --git a/vviewer/Image.py b/vviewer/Image.py
index bb9e2ecd6c854124b83f483f2e9aace1916d6e13..016d7c44a4949178ec316466b6b701c1b5f801c6 100644
--- a/vviewer/Image.py
+++ b/vviewer/Image.py
@@ -374,34 +374,34 @@ class Image(object):
             return 0
 
         [self.image_slices_pos[0], truth] = \
-            pg.makeARGB(
+            pg.mymakeARGB(
                 self.image_res[self.coord[0],:,:], lut=self.cmap_pos,
                 levels=[self.threshold_pos[0], self.threshold_pos[1]],
                 useRGBA=True)
         [self.image_slices_pos[1], truth] = \
-            pg.makeARGB(
+            pg.mymakeARGB(
                 self.image_res[:,self.coord[1],:], lut=self.cmap_pos,
                 levels=[self.threshold_pos[0], self.threshold_pos[1]],
                 useRGBA=True)
         [self.image_slices_pos[2], truth] = \
-            pg.makeARGB(
+            pg.mymakeARGB(
                 self.image_res[:,:,self.coord[2]], lut=self.cmap_pos,
                 levels=[self.threshold_pos[0], self.threshold_pos[1]],
                 useRGBA=True)
 
         if self.two_cm:
             [self.image_slices_neg[0], truth] = \
-                pg.makeARGB(
+                pg.mymakeARGB(
                     self.image_res[self.coord[0],:,:], self.cmap_neg,
                     levels=[self.threshold_neg[0], self.threshold_neg[1]],
                     useRGBA=True)
             [self.image_slices_neg[1], truth] = \
-                pg.makeARGB(
+                pg.mymakeARGB(
                     self.image_res[:,self.coord[1],:], self.cmap_neg,
                     levels=[self.threshold_neg[0], self.threshold_neg[1]],
                     useRGBA=True)
             [self.image_slices_neg[2], truth] = \
-                pg.makeARGB(
+                pg.mymakeARGB(
                     self.image_res[:,:,self.coord[2]], self.cmap_neg,
                     levels=[self.threshold_neg[0], self.threshold_neg[1]],
                     useRGBA=True)
@@ -431,12 +431,12 @@ class Image(object):
             sliced = self.image_res[:,coord,:]
         if plane == 't':
             sliced = self.image_res[:,:,coord]
-        [slice_rgba, truth] = pg.makeARGB(
+        [slice_rgba, truth] = pg.mymakeARGB(
             sliced, self.cmap_pos, levels=[self.threshold_pos[0],
             self.threshold_pos[1]], useRGBA=True)
         if (self.two_cm and
                 float(self.threshold_neg[0]) != float(self.threshold_neg[1])):
-            [slice_neg, truth] = pg.makeARGB(
+            [slice_neg, truth] = pg.mymakeARGB(
                 sliced, self.cmap_neg, levels=[self.threshold_neg[0],
                 self.threshold_neg[1]], useRGBA=True)
             slice_rgba = np.add(slice_rgba, slice_neg)
diff --git a/vviewer/pyqtgraph/functions.py b/vviewer/pyqtgraph/functions.py
index 24280b146d259b77a8caf5dfc94e872fd3182dbc..33a0fba053ce8a5f542afb2764e7d5a54f931604 100644
--- a/vviewer/pyqtgraph/functions.py
+++ b/vviewer/pyqtgraph/functions.py
@@ -848,6 +848,162 @@ def makeRGBA(*args, **kwds):
     kwds['useRGBA'] = True
     return makeARGB(*args, **kwds)
 
+
+def myrescaleData(data, scale, maxi, mini):
+    """Rescale *data* for lookup-table indexing: values at or below *mini* map to 0,
+    values at or above *maxi* map to *scale*, and values in between are mapped
+    linearly onto [1, *scale* - 1]."""
+    d2 = data - mini
+    d2 = np.multiply(d2, (scale - 2) / (maxi - mini)) + 1
+    d2[np.where(data <= mini)] = 0
+    d2[np.where(data >= maxi)] = scale
+    return d2.astype(int)
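+
+# For illustration, mapping data spanning [0.0, 1.0] with an 8-bit scale (the values
+# below are placeholders, not ones used elsewhere in the viewer):
+#   myrescaleData(np.array([0.0, 0.5, 1.0]), scale=255, maxi=1.0, mini=0.0)
+#   -> array([0, 127, 255])    # <= mini -> 0, interior -> [1, scale-1], >= maxi -> scale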
+
+def mymakeARGB(data, lut=None, levels=None, scale=None, useRGBA=False):
+    """
+    Convert an array of values into an ARGB array suitable for building QImages, OpenGL textures, etc.
+    This is a variant of makeARGB that uses myrescaleData (above) for the rescaling stage.
+
+    Returns the ARGB array (values 0-255) and a boolean indicating whether there is alpha channel data.
+    This is a two-stage process:
+
+        1) Rescale the data based on the values in the *levels* argument (min, max).
+        2) Determine the final output by passing the rescaled values through a lookup table.
+
+    Both stages are optional.
+
+    ============== ==================================================================================
+    **Arguments:**
+    data           numpy array of int/float types.
+    levels         List [min, max]; optionally rescale data before converting through the
+                   lookup table. Unlike makeARGB, the rescaling is done by myrescaleData:
+                   values at or below min map to 0, values at or above max map to *scale*,
+                   and values strictly between them are mapped linearly onto [1, *scale* - 1]::
+
+                      rescaled = (data - min) * (*scale* - 2) / (max - min) + 1
+
+                   It is also possible to use a 2D (N,2) array of values for levels. In this case,
+                   it is assumed that each pair of min,max values in the levels array should be
+                   applied to a different subset of the input data (for example, the input data may
+                   already have RGB values and the levels are used to independently scale each
+                   channel). The use of this feature requires that levels.shape[0] == data.shape[-1].
+    scale          The maximum value to which data will be rescaled before being passed through the
+                   lookup table (or returned if there is no lookup table). By default this will
+                   be set to the length of the lookup table, or 255 if no lookup table is provided.
+                   For OpenGL color specifications (as in GLColor4f) use scale=1.0
+    lut            Optional lookup table (array with dtype=ubyte).
+                   Values in data will be converted to color by indexing directly from lut.
+                   The output data shape will be input.shape + lut.shape[1:].
+
+                   Note: the output of mymakeARGB will have the same dtype as the lookup table, so
+                   for conversion to QImage, the dtype must be ubyte.
+
+                   Lookup tables can be built using GradientWidget.
+    useRGBA        If True, the data is returned in RGBA order (useful for building OpenGL textures).
+                   The default is False, which returns in ARGB order for use with QImage
+                   (Note that 'ARGB' is a term used by the Qt documentation; the _actual_ order
+                   is BGRA).
+    ============== ==================================================================================
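+
+    A minimal usage sketch (``img`` and ``cmap`` are placeholder names for a 2D float image
+    and a ubyte lookup table of shape (N, 4))::
+
+        rgba, hasAlpha = mymakeARGB(img, lut=cmap, levels=[img.min(), img.max()], useRGBA=True)
+        # rgba has shape img.shape + (4,) and dtype ubyte; hasAlpha is True because the
+        # lookup table provides a fourth (alpha) channel.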
+    """
+    profile = debug.Profiler()
+
+    if lut is not None and not isinstance(lut, np.ndarray):
+        lut = np.array(lut)
+    if levels is not None and not isinstance(levels, np.ndarray):
+        levels = np.array(levels)
+
+    if levels is not None:
+        if levels.ndim == 1:
+            if len(levels) != 2:
+                raise Exception('levels argument must have length 2')
+        elif levels.ndim == 2:
+            if lut is not None and lut.ndim > 1:
+                raise Exception('Cannot make ARGB data when both levels and lut have ndim > 2')
+            if levels.shape != (data.shape[-1], 2):
+                raise Exception('levels must have shape (data.shape[-1], 2)')
+        else:
+            raise Exception("levels argument must be 1D or 2D (got shape %s)." % (levels.shape,))
+
+    profile()
+
+    if scale is None:
+        if lut is not None:
+            scale = lut.shape[0]
+        else:
+            scale = 255.
+
+    ## Apply levels if given
+    if levels is not None:
+
+        if isinstance(levels, np.ndarray) and levels.ndim == 2:
+            ## we are going to rescale each channel independently
+            if levels.shape[0] != data.shape[-1]:
+                raise Exception("When rescaling multi-channel data, there must be the same number of levels as channels (data.shape[-1] == levels.shape[0])")
+            newData = np.empty(data.shape, dtype=int)
+            for i in range(data.shape[-1]):
+                minVal, maxVal = levels[i]
+                if minVal == maxVal:
+                    maxVal += 1e-16
+                #newData[...,i] = rescaleData(data[...,i], scale/(maxVal-minVal), minVal, dtype=int)
+                newData[...,i] = myrescaleData(data[...,i], scale, maxVal, minVal)
+            data = newData
+        else:
+            minVal, maxVal = levels
+            if minVal == maxVal:
+                maxVal += 1e-16
+            if maxVal == minVal:
+                #data = rescaleData(data, 1, minVal, dtype=int)
+                data = myrescaleData(data, 1, maxVal, minVal)
+            else:
+                #data = rescaleData(data, scale/(maxVal-minVal), minVal, dtype=int)
+                data = myrescaleData(data, scale, maxVal, minVal)
+
+    profile()
+
+    ## apply LUT if given
+    if lut is not None:
+        data = applyLookupTable(data, lut)
+    else:
+        if data.dtype != np.ubyte:
+            data = np.clip(data, 0, 255).astype(np.ubyte)
+
+    profile()
+
+    ## copy data into ARGB ordered array
+    imgData = np.empty(data.shape[:2]+(4,), dtype=np.ubyte)
+
+    profile()
+
+    if useRGBA:
+        order = [0,1,2,3] ## array comes out RGBA
+    else:
+        order = [2,1,0,3] ## for some reason, the colors line up as BGR in the final image.
+
+    if data.ndim == 2:
+        # This is tempting:
+        #   imgData[..., :3] = data[..., np.newaxis]
+        # ..but it turns out this is faster:
+        for i in range(3):
+            imgData[..., i] = data
+    elif data.shape[2] == 1:
+        for i in range(3):
+            imgData[..., i] = data[..., 0]
+    else:
+        for i in range(0, data.shape[2]):
+            imgData[..., i] = data[..., order[i]]
+
+    profile()
+
+    if data.ndim == 2 or data.shape[2] == 3:
+        alpha = False
+        imgData[..., 3] = 255
+    else:
+        alpha = True
+
+    profile()
+    return imgData, alpha
+
+
 def makeARGB(data, lut=None, levels=None, scale=None, useRGBA=False):
     """
     Convert an array of values into an ARGB array suitable for building QImages, OpenGL textures, etc.
diff --git a/vviewer/vviewer.py b/vviewer/vviewer.py
index 648f42f01188e7f2084c05b679b798a7e9a335dc..5377242d80149552d28933e37558e0f3f9a57800 100755
--- a/vviewer/vviewer.py
+++ b/vviewer/vviewer.py
@@ -897,10 +897,11 @@ class vviewer(QtGui.QMainWindow):
                 self.images[i].funcdialog.setWindowTitle(itemname)
 		# HERE IS THE QUICK FIX
                 if self.images[i].frame_time == 0:
-                    self.images[i].frame_time = 1.0
-                    QtGui.QMessageBox.warning(self, "Warning",
-                        "Warning: TR not found. Set automatically to 1. TR can \
-			             be changed in the functional image dialog.")
+                    self.images[i].frame_time = 1.0
+                    QtGui.QMessageBox.warning(
+                        self, "Warning",
+                        "Warning: TR not found. Set automatically to 1. TR can "
+                        "be changed in the functional image dialog.")
 
         # If functional image are present the widgets are turned on.
         self.resetFuncView()
@@ -1520,8 +1521,9 @@ class vviewer(QtGui.QMainWindow):
                 self.cursor_coord[2] >= 0):
                 coords_valid = True
             if coords_valid:
-                intensity = str(np.round(
-                    self.images[index].getIntensity(self.cursor_coord),3))
+                intensity = str(self.images[index].getIntensity(self.cursor_coord))
             else:
                 intensity = "nan"
             self.intensity_lbl_cursor.setText(intensity)
@@ -1573,8 +1575,9 @@ class vviewer(QtGui.QMainWindow):
         if index >= 0:
             # no check for coordinates because the crosshair position should
             # always be within the image
-            intensity = (
-                str(np.round(self.images[index].getIntensity(),3)))
+            intensity = str(self.images[index].getIntensity())
             self.intensity_label.setText(intensity)
 
         # update ValueWindow