
Full Pipeline

Inputs

  • start_date: datetime start of the RV calculation window
  • end_date: datetime end of the RV calculation window
  • cadence: time between successive calculations [seconds]
  • csv_name: base name of the csv file the calculations are saved to (defaults to the start date if omitted)

Example usage

import datetime

start_date = datetime.datetime(2021, 8, 26, 8, 00, 0)
end_date = datetime.datetime(2021, 9, 3, 0, 00, 0)
cadence = 24*3600  # one calculation per day [seconds]
csv_name = 'csv_name'
component_calc(start_date, end_date, cadence, csv_name)
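
Once a run finishes, each date's components are appended to the csv file under the column headers written by the function below. A minimal sketch of reading the results back, assuming pandas is installed; the path is a placeholder for wherever CsvDir.CALC points on your system:

import pandas as pd

# placeholder path -- substitute the CsvDir.CALC directory used by the pipeline
df = pd.read_csv('csv_name.csv')

# e.g. inspect the convective velocity component for each observation date
print(df[['date_obs', 'date_jd', 'v_conv']].head())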

Function
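
The function assumes the usual pipeline imports are already in place. The third-party imports below follow directly from the code; the package-internal helpers (sfuncs, ctfuncs, lbfuncs, utils, CsvDir) are listed only as comments because their module paths depend on how the package is installed:

import datetime
import os

import numpy as np
import sunpy.map
from sunpy.net import Fido, attrs as a
from sunpy.coordinates import frames

# package-internal helpers used below (module paths not shown here):
# sfuncs, ctfuncs, lbfuncs, utils, and the CsvDir settings object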

def component_calc(start_date, end_date, cadence, csv_name=None):
    """
    Calculate RV components for each date in the requested range using the pipeline functions.

    Parameters
    ----------
    start_date: datetime.datetime
        start date of RV calculations
    end_date: datetime.datetime
        end date of RV calculations
    cadence: int
        how often to calculate RV components [seconds]
    csv_name: str, optional
        name of the file to store calculations in; defaults to the start date

    Returns
    -------
    None
        results are appended as rows to the csv file

    """

    # create file names
    if csv_name is None:
        csv_name = str(start_date)
    bad_dates_csv = os.path.join(CsvDir.CALC, csv_name + '_bad_dates.csv')
    csv_name = os.path.join(CsvDir.CALC, csv_name + '.csv')

    # print out csv title
    print("Beginning calculation of values for csv file: " + csv_name)

    # List of header strings
    row_contents = ['date_obs', 'date_jd', 'v_quiet', 'v_disc', 'v_phot', 'v_conv', 'f_bright', 'f_spot', 'f', 'Bobs',
                    'vphot_bright', 'vphot_spot', 'f_small', 'f_large', 'f_network', 'f_plage', 'f_nonconv',
                    'quiet_flux', 'ar_flux', 'conv_flux', 'pol_flux', 'pol_conv_flux', 'vconv_quiet', 'vconv_large',
                    'vconv_small']

    # append the header row to the csv file
    utils.append_list_as_row(csv_name, row_contents)

    # search window and HMI data products to query
    time_range = datetime.timedelta(seconds=22)
    physobs_list = [a.Physobs.los_velocity, a.Physobs.los_magnetic_field, a.Physobs.intensity]

    # build the list of dates to calculate
    total_seconds = (end_date - start_date).total_seconds()
    dates_list = [start_date + datetime.timedelta(seconds=cadence * x) for x in range(0, int(total_seconds / cadence))]

    for i, date in enumerate(dates_list):
        # convert the date to a string -- required for use in csv file
        date_str, date_obj, date_jd = utils.get_dates(date)

        # pull image within specified time range
        result = Fido.search(a.Time(str(date_obj - time_range), str(date_obj + time_range)),
                             a.Instrument.hmi, physobs_list[0] | physobs_list[1] | physobs_list[2])

        # download the files returned by the search
        file_download = Fido.fetch(result)

        # remove unusable file types
        good_files = []
        for file in file_download:
            name, extension = os.path.splitext(file)
            if extension == '.fits':
                good_files.append(file)

        if len(good_files) != 3:
            # append these values to the csv file
            save_vals = [date_str, 'not three good files']
            utils.append_list_as_row(bad_dates_csv, save_vals)
            # print that the files are missing
            print('\nNot three good files: ' + date_str + ' index: ' + str(i))

            pass
        else:
            # convert to map sequence
            map_seq = sunpy.map.Map(sorted(good_files))

            # check for missing data types
            missing_map = False
            # split into data types
            for j, map_obj in enumerate(map_seq):
                if map_obj.meta['content'] == 'DOPPLERGRAM':
                    vmap = map_obj
                elif map_obj.meta['content'] == 'MAGNETOGRAM':
                    mmap = map_obj
                elif map_obj.meta['content'] == 'CONTINUUM INTENSITY':
                    imap = map_obj
                else:
                    missing_map = True

            if missing_map:
                print("Missing a data product for " + date_str)

                # record the bad date in the csv file
                save_vals = [date_str, 'missing data product']
                utils.append_list_as_row(bad_dates_csv, save_vals)
                pass

            else:
                # coordinate transformation for maps
                x, y, pd, r, d, mu = ctfuncs.coordinates(vmap)
                wij, nij, rij = ctfuncs.vel_coords(x, y, pd, r, vmap)

                # remove bad mu values
                vmap, mmap, imap = ctfuncs.fix_mu(mu, [vmap, mmap, imap])

                # calculate relative positions
                deltaw, deltan, deltar, dij = sfuncs.rel_positions(wij, nij, rij, vmap)

                # calculate spacecraft velocity
                vsc = sfuncs.spacecraft_vel(deltaw, deltan, deltar, dij, vmap)

                # optimized solar rotation parameters
                a1 = 14.713
                a2 = -2.396
                a3 = -1.787
                a_parameters = [a1, a2, a3]

                # calculation of solar rotation velocity
                vrot = sfuncs.solar_rot_vel(wij, nij, rij, deltaw, deltan, deltar, dij, vmap, a_parameters)

                # calculate corrected velocity
                corrected_vel = vmap.data - np.real(vsc) - np.real(vrot)

                # corrected velocity maps
                map_vel_cor = sfuncs.corrected_map(corrected_vel, vmap, map_type='Corrected-Dopplergram',
                                            frame=frames.HeliographicCarrington)

                # limb brightening
                Lij = lbfuncs.limb_polynomial(imap)

                # calculate corrected data
                Iflat = imap.data / Lij

                # corrected intensity maps
                map_int_cor = sfuncs.corrected_map(Iflat, imap, map_type='Corrected-Intensitygram',
                                            frame=frames.HeliographicCarrington)

                # magnetic noise level
                B_noise = 8

                # calculate unsigned field strength
                Bobs, Br = sfuncs.mag_field(mu, mmap, B_noise)

                # corrected observed magnetic data map
                map_mag_obs = sfuncs.corrected_map(Bobs, mmap, map_type='Corrected-Magnetogram',
                                            frame=frames.HeliographicCarrington)


                ### calculate magnetic threshold
                # magnetic threshold value (G) from Yeo et al. 2013
                Br_cutoff = 24
                # mu cutoff value
                mu_cutoff = 0.3

                # calculate magnetic threshold
                active, quiet = sfuncs.mag_thresh(mu, mmap, Br_cutoff=Br_cutoff, mu_cutoff=mu_cutoff)

                # calculate intensity threshold
                fac_inds, spot_inds = sfuncs.int_thresh(map_int_cor, active, quiet)

                # create threshold array
                thresh_arr = sfuncs.thresh_map(fac_inds, spot_inds)

                # full threshold maps
                map_full_thresh = sfuncs.corrected_map(thresh_arr, mmap, map_type='Threshold',
                                                frame=frames.HeliographicCarrington)

                ### velocity contribution due to convective motion of quiet-Sun
                # assumed to live in sfuncs like the other solar functions; calling through the module
                # also avoids rebinding the bare name v_quiet on each loop iteration
                v_quiet = sfuncs.v_quiet(map_vel_cor, imap, quiet)

                ### velocity contribution due to rotational Doppler imbalance of active regions (faculae/sunspots)
                # calculate photospheric velocity (again through sfuncs, to avoid shadowing the function name)
                v_phot, vphot_bright, vphot_spot = sfuncs.v_phot(quiet, active, Lij, vrot, imap, mu, fac_inds,
                                                                 spot_inds)

                ### velocity contribution due to suppression of convective blueshift by active regions
                # calculate disc-averaged velocity (again through sfuncs)
                v_disc = sfuncs.v_disc(map_vel_cor, imap)

                # calculate convective velocity
                v_conv = v_disc - v_quiet

                ### filling factor
                # calculate filling factor
                f_bright, f_spot, f = sfuncs.filling_factor(mu, mmap, active, fac_inds, spot_inds)

                ### unsigned magnetic flux
                # unsigned observed flux
                unsigned_obs_flux = sfuncs.unsigned_flux(map_mag_obs, imap)

                ### calculate the area filling factor
                pixA_hem = ctfuncs.pix_area_hem(wij, nij, rij, vmap)
                area = sfuncs.area_calc(active, pixA_hem)
                f_small, f_large, f_network, f_plage, f_nonconv = sfuncs.area_filling_factor(active, area, mu,
                                                                                             mmap, fac_inds)

                ### get the unsigned flux
                quiet_flux, ar_flux, conv_flux, pol_flux, pol_conv_flux = sfuncs.area_unsigned_flux(map_mag_obs,
                                                                                                    imap, area,
                                                                                                    active)

                ### get area weighted convective velocities
                vconv_quiet, vconv_large, vconv_small = sfuncs.area_vconv(map_vel_cor, imap, active, area)

                # make array of what we want to save
                save_vals = [v_quiet, v_disc, v_phot, v_conv, f_bright, f_spot, f, unsigned_obs_flux, vphot_bright,
                             vphot_spot, f_small, f_large, f_network, f_plage, f_nonconv, quiet_flux, ar_flux,
                             conv_flux, pol_flux, pol_conv_flux, vconv_quiet, vconv_large, vconv_small]

                # round the values before saving
                rounded = np.around(save_vals, 3)
                round_vals = [date_str, date_jd]
                for val in rounded:
                    round_vals.append(val)

                # append these values to the csv file
                utils.append_list_as_row(csv_name, round_vals)

                # print that the date is completed
                print('\nCalculations and save to file complete for ' + date_str + ' index: ' + str(i))

    print('Calculation complete for dates:', start_date, 'to', end_date)
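
The a_parameters used above (14.713, -2.396, -1.787) match the standard solar differential-rotation coefficients, in degrees per day, of the usual omega(lat) = a1 + a2*sin^2(lat) + a3*sin^4(lat) parameterization. As a standalone illustration of the profile those numbers describe (the pipeline itself projects the rotation onto the line of sight inside sfuncs.solar_rot_vel), a small sketch:

import numpy as np

def rotation_profile(lat_deg, a_parameters=(14.713, -2.396, -1.787)):
    """Illustrative differential rotation rate [deg/day] at a solar latitude [deg]."""
    a1, a2, a3 = a_parameters
    sin_lat = np.sin(np.radians(lat_deg))
    return a1 + a2 * sin_lat ** 2 + a3 * sin_lat ** 4

# the equator rotates faster than mid-latitudes
print(rotation_profile(0.0))   # ~14.7 deg/day
print(rotation_profile(45.0))  # ~13.1 deg/day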
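
Both the result rows and the bad-date rows are written through utils.append_list_as_row. If you need to adapt the output, a hypothetical stand-in with the same calling pattern might look like the sketch below; the real helper lives in the package's utils module and may differ:

import csv

def append_list_as_row(file_name, list_of_elem):
    # hypothetical stand-in for utils.append_list_as_row:
    # open the file in append mode and write the list as one csv row
    with open(file_name, 'a+', newline='') as write_obj:
        csv_writer = csv.writer(write_obj)
        csv_writer.writerow(list_of_elem)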