Before I get to the question, let me note that I'm a trained scientist, not a programmer; I've done my best to teach myself what I've needed to know so far, but in the interest of making my code usable for other people in my research group, I'd like to improve it. I spent a weekend writing actual documentation for everything, and the next item on the list for making this more stable is unit testing, which I think I understand in principle but have never implemented in anything I've written.
I've developed a wrapper around paramiko that allows me to load scientific data from a remote host. I'm curious how best to design a unit test for the other scripts that rely on this function, or whether one is even necessary; a rough sketch of what I have in mind follows the function below. If there are any security holes or possible performance improvements, suggestions would be very much appreciated!
def get_remote_data(filepath, time=False, info=False, copy_to_local=False):
    """
    A paramiko wrapper that gets file from a remote computer. Parses
    hostname from filepath. Works only for netcdf files!
    Keyword Arguments:
    filepath         -- the path of the file with hostname.
    time             -- print time required for loading(default False)
    info             -- print some information about the file after loading 
                        (default False)
    copy_to_local    -- copies file to local /tmp/remote_data/ and checks 
                        if this file already exists there (default False)
    Example:
    >>> get_remote_data('pgierz@rayo3:/csys/paleo2/pgierz/GR30s.nc')
    # TODO: Find out if this is the right way to do this, and make a unit test.
    Paul J. Gierz, Sat Feb 14 14:20:43 2015
    """
    # Imports are local so the function is self-contained; the time module
    # is aliased so it does not shadow the "time" keyword argument.
    from UI_Stuff import print_colors  # my own library
    import time as time_module
    import paramiko
    import os
    from scipy.io import netcdf
    if time:
        now = time_module.time()
    print "Trying to load ".ljust(40) \
          +print_colors.WARNING("{f}").format(f=os.path.basename(filepath)).ljust(100)
    if not copy_to_local:
        # Split "user@host:/path" into its pieces; split on the first ':'
        # only, in case the remote path itself contains a colon.
        user_host, rfile = filepath.split(':', 1)
        user, host = user_host.split('@')
        # FIXME: This function only works if .ssh/id_rsa exists
        privatekeyfile = os.path.expanduser('~/.ssh/id_rsa')
        mykey = paramiko.RSAKey.from_private_key_file(privatekeyfile)
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        client.connect(host, username=user, pkey=mykey)
        sftp = client.open_sftp()
        fileObject = sftp.file(rfile)
        nc_file = netcdf.netcdf_file(fileObject)
        pre_s = "Loaded from "+host
    else:
        # _copy_remote_file is my own function that wraps an rsync process.
        pre_s, fileObject = _copy_remote_file(filepath)
        nc_file = netcdf.netcdf_file(fileObject)
    if time:
        print pre_s.ljust(40) \
              + print_colors.OKGREEN("{f}".format(f=os.path.basename(filepath))).ljust(100) \
              + " in " \
              + print_colors.OKBLUE("{t}".format(t=round(time_module.time() - now))) \
              + " seconds"
    if info:
        # Measure the header before colouring it, so the closing rule below
        # matches its visible length (escape codes would inflate len()).
        header = "#"*30 + " INFO of " + os.path.basename(filepath) + " " + "#"*30
        print print_colors.HEADER(header)
        print "Variables: \n"
        for k, v in nc_file.variables.iteritems():
            print k, ":  dimensions - " + str(v.dimensions) + " shape - " + str(v.shape)
        print "Dimensions: \n"
        print nc_file.dimensions
        print print_colors.HEADER("#"*len(header))
    return nc_file
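
For concreteness, here is the rough shape of the test I had in mind, though I don't know whether this is how it's usually done. I'm assuming the function lives in a module called remote_io (a placeholder name) and that the mock library is available (it ships as unittest.mock on Python 3; on Python 2 it's the separate mock package). The idea is to patch paramiko so no real SSH connection or key file is needed, hand the function an in-memory NetCDF file, and then check both the parsed connection details and the returned data:

import io
import unittest

import mock  # on Python 3 this would be: from unittest import mock
from scipy.io import netcdf

from remote_io import get_remote_data  # placeholder module name


def make_netcdf_bytes():
    """Build a tiny NetCDF file in memory to stand in for the remote file."""
    buf = io.BytesIO()
    nc = netcdf.netcdf_file(buf, 'w')
    nc.createDimension('x', 3)
    var = nc.createVariable('x', 'f', ('x',))
    var[:] = [1.0, 2.0, 3.0]
    nc.flush()  # write without closing, so buf stays usable
    return buf.getvalue()


class GetRemoteDataTest(unittest.TestCase):

    @mock.patch('paramiko.RSAKey.from_private_key_file')
    @mock.patch('paramiko.SSHClient')
    def test_parses_host_and_loads_file(self, mock_client_cls, mock_key):
        # The fake SFTP "file" is just our in-memory NetCDF file.
        fake_client = mock_client_cls.return_value
        fake_sftp = fake_client.open_sftp.return_value
        fake_sftp.file.return_value = io.BytesIO(make_netcdf_bytes())

        result = get_remote_data('pgierz@rayo3:/csys/paleo2/pgierz/GR30s.nc')

        # The username and hostname must have been parsed from the path...
        fake_client.connect.assert_called_once_with(
            'rayo3', username='pgierz', pkey=mock_key.return_value)
        fake_sftp.file.assert_called_once_with('/csys/paleo2/pgierz/GR30s.nc')
        # ...and the result should behave like a loaded netcdf_file.
        self.assertEqual(result.variables['x'].shape, (3,))


if __name__ == '__main__':
    unittest.main()

One thing I noticed while writing this: if I factored the user/host/path parsing out into its own small function, that piece could be tested directly with no mocking at all, which is part of why I'm asking whether I've structured this sensibly.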