In [1]:
import warnings
import xarray as xr
import matplotlib.pyplot as plt
import matplotlib.colors as mc
from matplotlib.ticker import AutoMinorLocator
import matplotlib.path as mpath
import cartopy.crs as ccrs
from cartopy.util import add_cyclic_point
import cartopy.feature as feature
from cmocean import cm as cmo
from netCDF4 import Dataset as NetCDFFile
import statsmodels.api as sm
import statsmodels.tools.eval_measures as smte
import os
import datetime
import paramiko
import pandas as pd
import numpy as np
import matplotlib.dates as mdates
from cdo import *
cdo = Cdo()
%matplotlib inline
In [2]:
#warnings.simplefilter(action='once')
warnings.simplefilter("ignore")
In [3]:
class Color:
    BLACK          = '\033[30m'  # (foreground) black
    RED            = '\033[31m'  # (foreground) red
    GREEN          = '\033[32m'  # (foreground) green
    YELLOW         = '\033[33m'  # (foreground) yellow
    BLUE           = '\033[34m'  # (foreground) blue
    MAGENTA        = '\033[35m'  # (foreground) magenta
    CYAN           = '\033[36m'  # (foreground) cyan
    WHITE          = '\033[37m'  # (foreground) white
    COLOR_DEFAULT  = '\033[39m'  # reset foreground colour to default
    BOLD           = '\033[1m'   # bold
    UNDERLINE      = '\033[4m'   # underline
    INVISIBLE      = '\033[08m'  # invisible
    REVERCE        = '\033[07m'  # swap foreground and background colours
    BG_BLACK       = '\033[40m'  # (background) black
    BG_RED         = '\033[41m'  # (background) red
    BG_GREEN       = '\033[42m'  # (background) green
    BG_YELLOW      = '\033[43m'  # (background) yellow
    BG_BLUE        = '\033[44m'  # (background) blue
    BG_MAGENTA     = '\033[45m'  # (background) magenta
    BG_CYAN        = '\033[46m'  # (background) cyan
    BG_WHITE       = '\033[47m'  # (background) white
    BG_DEFAULT     = '\033[49m'  # reset background colour to default
    RESET          = '\033[0m'   # reset everything
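
The ANSI escape codes above are used to colour status messages when printing; a minimal usage sketch (the message text is only an illustration):

print(Color.RED + Color.BOLD + 'warning: job not running' + Color.RESET)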
In [4]:
exp_id='LGMoGMwLglac1dLc'
exp_id1='LGMoGM_2005LAI'
exp_id0='piAMIP_2005LAI'
gcmdir='/data29/kanon/miroc5-iso/'
home='/home/kanon/monitor/miroc5-iso/'
intyr=5
#intyr=10
In [5]:
latmin=-90
latmax=-40
nlats=18
lonmin=-180
lonmax=180
nlons = 128  # number of grid points in the longitude direction
year=2003
numofdays=365
# Dome F
# 77°19'01"S, 39°42'12"E
df_lat=-77.3
df_lon=39.66
c_df='black'
s_df=25
In [6]:
# Compute a circle in axes coordinates, which we can use as a boundary
# for the map. We can pan/zoom as much as we like - the boundary will be
# permanently circular.
theta = np.linspace(0, 2*np.pi, 100)
center, radius = [0.5, 0.5], 0.5
verts = np.vstack([np.sin(theta), np.cos(theta)]).T
circle = mpath.Path(verts * radius + center)
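
The circle path is meant to be applied as the boundary of a south-polar stereographic axes; a minimal sketch of how it would typically be used (the axes ax_sp is only an illustration and is not reused below):

ax_sp = plt.axes(projection=ccrs.SouthPolarStereo())
ax_sp.set_extent([lonmin, lonmax, latmin, latmax], ccrs.PlateCarree())
ax_sp.set_boundary(circle, transform=ax_sp.transAxes)  # clip the map to the circular path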

LGMoGMwLglac1dLc (LGM SST, sea ice, CO2 & ice sheets)

In [7]:
# For the size of the figures
plt.rcParams['figure.dpi'] = 300

Current date and time

In [8]:
now = datetime.datetime.now()
print (now.strftime("%Y-%m-%d %H:%M"))
2020-08-03 18:04

The following jobs are currently running on isotope3

In [9]:
client = paramiko.SSHClient()
client.load_system_host_keys()
client.connect('isotope3.iis.u-tokyo.ac.jp', username='kanon')

stdin, stdout, stderr = client.exec_command('qstat')
queue_status = stdout.readlines()
queue_status = [l.split() for l in queue_status]
def check_everything():
    if len(queue_status) != 0:
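        # qstat prints two header lines (column names and a dashed separator) before
        # the job table, hence the [2:] slice below.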
        queue_df = pd.DataFrame(queue_status[2:])
        queue_df.columns = ['JOB ID', 'JOB NAME', 'USERNAME', 'CPU TIME USE', 'STATUS', 'QUEUE']
        if any(queue_df['USERNAME'].str.startswith('kanon')):
            if any(queue_df['USERNAME'].str.startswith('kanon') & queue_df['JOB NAME'].str.endswith(exp_id[-10:])):
                return queue_df[(queue_df['USERNAME'] == 'kanon') & (queue_df['JOB NAME'].str.endswith(exp_id[-10:]))]
            else:
                print ('No '+ exp_id + ' job running on isotope3')
        else:
            print("no jobs running on isotope3")
        return None
check_everything()
no jobs running on isotope3
In [10]:
# Function to read the log file
def get_log_output(exp_id):
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.connect('isotope3.iis.u-tokyo.ac.jp', username='kanon')
    stdin, stdout, stderr = client.exec_command('cat '+gcmdir+'sh/'+exp_id+'/'+exp_id+'.log')
    return stdout

def generate_dataframe_from_miroc_logfile(log):
    log_dataframe = pd.read_table(log,
                                  sep=r" :  | -" ,
                                  skiprows=1,
                                  infer_datetime_format=True,
                                  names=["Date", "Message", "State"],
                                  engine='python', index_col=0)
    middle_column = log_dataframe["Message"].apply(lambda x: pd.Series(str(x).split()[0:3]))
    log_dataframe.drop("Message", axis=1, inplace=True)
    middle_column.columns = ["Run Number", "Exp Date", "Job ID"]
    log_dataframe = pd.concat([log_dataframe, middle_column], axis=1)
    #log_dataframe.set_index(pd.to_datetime(log_dataframe.index), inplace=True)
    log_dataframe.set_index(pd.to_datetime(log_dataframe.index, utc=True), inplace=True)
    log_dataframe.index = log_dataframe.index.tz_convert('Asia/Tokyo')
    return log_dataframe

def compute_effective_throughput(log_dataframe, verbose=False):
    starts = log_dataframe[log_dataframe.State == " start"]
    ends = log_dataframe[log_dataframe.State == " done"]
    # Drop the duplicated starts:
    starts = starts.drop_duplicates(subset="Run Number", keep="last")
    merged = pd.concat([starts, ends])
    groupby = merged.groupby("Run Number")
    run_diffs = {"Run Number": [], "Time Diff": []}
    for name, group in groupby:
        run_diffs["Run Number"].append(int(name))
        run_diffs["Time Diff"].append(group.index[-1] - group.index[0])
    diffs = pd.DataFrame(run_diffs).sort_values("Run Number").set_index("Run Number")
    DAY = datetime.timedelta(1)
    throughput = DAY / diffs.mean()
    if verbose:
        print("Your run is taking %s on average" % average_timedelta)
        print("this is an effective throughput of %s simulated runs per day, assuming no queue time" % throughput)
    return diffs.mean(), throughput, diffs
In [11]:
# Average walltime and effective number of simulated years per day:
#stdin, stdout, stderr = client.exec_command("cd "+gcmdir+"sh/"+exp_id+"; grep 'setenv YSTR' agcm_"+exp_id+".run.sh")
#start_year = int(stdout.readlines()[0].split()[2])
start_year=1951
stdin, stdout, stderr = client.exec_command("cd "+gcmdir+"sh/"+exp_id+"; cat *.date")
current_year = int(stdout.readlines()[0].split()[0][:4])
stdin, stdout, stderr = client.exec_command("cd "+gcmdir+"sh/"+exp_id+"; grep 'setenv YMAX' agcm_"+exp_id+".run2.sh")
final_year = int(stdout.readlines()[0].split()[2])
current_year = current_year - start_year
final_year = final_year - start_year

walltime, throughput, diffs = compute_effective_throughput(
    generate_dataframe_from_miroc_logfile(get_log_output(exp_id)))
df = pd.DataFrame.from_dict({"Walltime": walltime,
                   "Throughput": throughput}, orient="index")
df.columns = [exp_id]
df
Out[11]:
LGMoGMwLglac1dLc
Throughput 0.511836
Walltime 1 days 22:53:23.954545
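
Throughput here is simply one day divided by the mean walltime per run (DAY / diffs.mean() above); a quick sanity check with the numbers from the table:

datetime.timedelta(days=1) / datetime.timedelta(days=1, hours=22, minutes=53, seconds=24)  # ~0.512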
In [12]:
# Simulation Timeline
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
#
log_dataframe = generate_dataframe_from_miroc_logfile(get_log_output(exp_id))
# Remove last line of log file if the experiment is over
if "over" in log_dataframe.iloc[-1]["Exp Date"]:
    log_dataframe = log_dataframe[:-1]
    print ('the experiment is over')
# Drop the last entry if it's start:
if "start" in log_dataframe.iloc[-1]["State"]:
    end_of_log = log_dataframe.iloc[:-1].tail(30)
else:
    end_of_log = log_dataframe.tail(30)
end_groups = end_of_log.groupby("Run Number")
f, ax = plt.subplots(1, 1, dpi=150, figsize=(15, 1.5))
#print(group.index)
for name, group in end_groups:
    bdate = group.index[0]
    edate = group.index[-1]
    edate, bdate = [mdates.date2num(item) for item in (edate, bdate)]
    # The color is the same as the progressbar below, use the colormeter to figure it out.
    ax.barh(0, edate - bdate, left=bdate, height=0.2, color=(217./255., 83./255., 79./255.),
            edgecolor="black")
ax.set_ylim(-0.5, 0.5)
for direction in ["top", "left", "right"]:
    ax.spines[direction].set_visible(False)
ax.yaxis.set_visible(False)
ax.xaxis_date()
#ax.xaxis.set_major_locator(mdates.HourLocator(interval=2))
#ax.xaxis.set_minor_locator(mdates.HourLocator(interval=1))
ax.xaxis.set_major_formatter(mdates.DateFormatter("%H:%M %d.%m.%y", tz=log_dataframe.index.tz))
the experiment is over
In [13]:
# Progress Bar
years_per_day = np.round(df[exp_id].Throughput)
years_left = final_year + 1 - current_year
if years_per_day == 0:
    days_left=999
else:
    days_left = years_left / years_per_day

finishing_date = datetime.datetime.now() + datetime.timedelta(days=days_left)
desc = "Done on "+finishing_date.strftime("%d %b, %Y")+ ": {percentage:3.0f}%"
r_bar = " "+str(current_year)+"/"+str(final_year+1)+ ", Throughput ~"+str(years_per_day)+"yrs/day"
from tqdm import tqdm_notebook
pbar = tqdm_notebook(total=final_year+1, 
                     desc=desc,
                     bar_format=desc+"{bar}"+r_bar)
pbar.update(final_year+1 - years_left)
pbar.close()

In [14]:
temp2_file = home +exp_id +'/' +exp_id +'_temp2_miroc5-iso_catted_yearmean_fldmean.nc'
temp2_dataset = xr.open_dataset(temp2_file, decode_times=False)
#temp2_rm = temp2_dataset.rolling(time=21, center=True).mean()
d18Op_file = home +exp_id +'/' +exp_id +'_d18Op_miroc5-iso_catted_yearmean_fldmean.nc'
d18Op_dataset = xr.open_dataset(d18Op_file, decode_times=False)
#d18Op_rm = d18Op_dataset.rolling(time=21, center=True).mean()
In [15]:
temp2_file0 = home +exp_id0 +'/' +exp_id0 +'_temp2_miroc5-iso_catted_yearmean_fldmean.nc'
temp2_dataset0 = xr.open_dataset(temp2_file0, decode_times=False)
#temp2_rm = temp2_dataset.rolling(time=21, center=True).mean()
d18Op_file0 = home +exp_id0 +'/' +exp_id0 +'_d18Op_miroc5-iso_catted_yearmean_fldmean.nc'
d18Op_dataset0 = xr.open_dataset(d18Op_file0, decode_times=False)
#d18Op_rm = d18Op_dataset.rolling(time=21, center=True).mean()

Monitoring of temperature and $\delta^{18}$O in precipitation

In [16]:
f, (ax1, ax2) = plt.subplots(1, 2, dpi=150, figsize=(8,4))

ax1.plot(temp2_dataset.tas.data.squeeze())
#ax1.plot(temp2_rm.tas.data.squeeze(), 'r')
ax1.set_xlabel("Simulation Time (Years)")
ax1.set_ylabel('Global $\mathsf{T_{2m}}$' + ' [\u00b0C]')

ax2.plot(d18Op_dataset.d18O_precip.data.squeeze())
#ax2.plot(d18Op_rm.d18O_precip.data.squeeze(), 'r')
ax2.set_xlabel("Simulation Time (Years)")
ax2.set_ylabel('Global $\mathsf{\delta^{18}O_{p}}$' + ' [\u2030]')

f.suptitle(exp_id +" - miroc5", fontsize=14)

plt.tight_layout()
plt.subplots_adjust(top=0.88, bottom=0.22)
In [17]:
# MIROC5-iso
ifile_PI_timmean_atm = home +exp_id0 +'/' +exp_id0 +'_miroc5-iso_last10yrs_timmean.iso.nc'
ifile_PI_yseasmean_atm = home +exp_id0 +'/' +exp_id0 +'_miroc5-iso_last10yrs_yseasmean.iso.nc'
nc_PI_timmean_atm = NetCDFFile(ifile_PI_timmean_atm)
nc_PI_yseasmean_atm = NetCDFFile(ifile_PI_yseasmean_atm)

# Coordinates
lon_miroc = nc_PI_timmean_atm.variables['lon']
lat_miroc = nc_PI_timmean_atm.variables['lat']
# Time-mean values
d18Op_PI = nc_PI_timmean_atm.variables['d18O_precip'][0,:,:]
temp2m_PI = nc_PI_timmean_atm.variables['tas'][0,:,:]
precip_PI = nc_PI_timmean_atm.variables['pr'][0,:,:]
# Multiyear seasonal mean values
d18Op_yseasmean_PI = nc_PI_yseasmean_atm.variables['d18O_precip'][:,:,:]
temp2m_yseasmean_PI = nc_PI_yseasmean_atm.variables['tas'][:,:,:]
precip_yseasmean_PI = nc_PI_yseasmean_atm.variables['pr'][:,:,:]

# Time-mean values
d18Op_PI_cyc, lon_miroc_cyc = add_cyclic_point(d18Op_PI, coord=lon_miroc)
temp2m_PI_cyc, lon_miroc_cyc = add_cyclic_point(temp2m_PI, coord=lon_miroc)
precip_PI_cyc, lon_miroc_cyc = add_cyclic_point(precip_PI, coord=lon_miroc)
# Multiyear seasonal mean values
d18Op_yseasmean_PI_cyc, lon_miroc_cyc = add_cyclic_point(d18Op_yseasmean_PI, coord=lon_miroc)
temp2m_yseasmean_PI_cyc, lon_miroc_cyc = add_cyclic_point(temp2m_yseasmean_PI, coord=lon_miroc)
precip_yseasmean_PI_cyc, lon_miroc_cyc = add_cyclic_point(precip_yseasmean_PI, coord=lon_miroc)
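
add_cyclic_point appends one extra longitude column (a copy of the first one) so that global pcolormesh/contourf plots show no gap at the 0°/360° seam; a minimal sketch on synthetic data (the _field array is only an illustration):

_lon = np.arange(0, 360, 360 / nlons)        # 128 evenly spaced longitudes
_field = np.zeros((64, nlons))               # hypothetical (lat, lon) field
_field_cyc, _lon_cyc = add_cyclic_point(_field, coord=_lon)
print(_field.shape, '->', _field_cyc.shape)  # (64, 128) -> (64, 129)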
In [18]:
# MIROC5-iso
ifile_LGM_timmean_atm = home +exp_id +'/' +exp_id +'_miroc5-iso_last'+str(intyr)+'yrs_timmean.iso.nc'
ifile_LGM_yseasmean_atm = home +exp_id +'/' +exp_id +'_miroc5-iso_last'+str(intyr)+'yrs_yseasmean.iso.nc'
nc_LGM_timmean_atm = NetCDFFile(ifile_LGM_timmean_atm)
nc_LGM_yseasmean_atm = NetCDFFile(ifile_LGM_yseasmean_atm)

# Coordinates
#lon_miroc = nc_LGM_timmean_atm.variables['lon']
#lat_miroc = nc_LGM_timmean_atm.variables['lat']
# Time-mean values
d18Op_LGM = nc_LGM_timmean_atm.variables['d18O_precip'][0,:,:]
temp2m_LGM = nc_LGM_timmean_atm.variables['tas'][0,:,:]
precip_LGM = nc_LGM_timmean_atm.variables['pr'][0,:,:]
# Multiyear seasonal mean values
d18Op_yseasmean_LGM = nc_LGM_yseasmean_atm.variables['d18O_precip'][:,:,:]
temp2m_yseasmean_LGM = nc_LGM_yseasmean_atm.variables['tas'][:,:,:]
precip_yseasmean_LGM = nc_LGM_yseasmean_atm.variables['pr'][:,:,:]

# Time-mean values
d18Op_LGM_cyc, lon_miroc_cyc = add_cyclic_point(d18Op_LGM, coord=lon_miroc)
temp2m_LGM_cyc, lon_miroc_cyc = add_cyclic_point(temp2m_LGM, coord=lon_miroc)
precip_LGM_cyc, lon_miroc_cyc = add_cyclic_point(precip_LGM, coord=lon_miroc)
# Multiyear seasonal mean values
d18Op_yseasmean_LGM_cyc, lon_miroc_cyc = add_cyclic_point(d18Op_yseasmean_LGM, coord=lon_miroc)
temp2m_yseasmean_LGM_cyc, lon_miroc_cyc = add_cyclic_point(temp2m_yseasmean_LGM, coord=lon_miroc)
precip_yseasmean_LGM_cyc, lon_miroc_cyc = add_cyclic_point(precip_yseasmean_LGM, coord=lon_miroc)

Maps of 2m-temperature

We take the last 10 years of the simulation to make the mean maps.

PI

In [19]:
fig, axes = plt.subplots(1, 3, figsize=(13,3.5), subplot_kw=dict(projection=ccrs.Robinson()))
plt.rcParams.update({'font.family': 'sans-serif', 'text.usetex': False})

levels = [-70,-60,-50, -40, -30, -20, -15, -10, -5, -2, 0, 2, 5, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 30]
n_colors = len(levels) + 1
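# from_levels_and_colors with extend='both' needs len(levels) + 1 colours:
# one bin between each pair of levels plus one extra below and one above.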
cmap = plt.get_cmap('RdYlBu_r', n_colors)
colors = cmap(range(cmap.N))
cmap, norm = mc.from_levels_and_colors(levels, colors, extend='both')

cs_world1 = axes[0].pcolormesh(lon_miroc_cyc, lat_miroc, temp2m_PI_cyc, norm=norm, cmap=cmap, transform=ccrs.PlateCarree())
axes[0].set_title('Annual mean', fontsize=16)
axes[0].coastlines()
cs_world2 = axes[1].pcolormesh(lon_miroc_cyc, lat_miroc, temp2m_yseasmean_PI_cyc[0,:,:], norm=norm, cmap=cmap, transform=ccrs.PlateCarree())
axes[1].set_title('DJF', fontsize=16)
axes[1].coastlines()
cs_world3 = axes[2].pcolormesh(lon_miroc_cyc, lat_miroc, temp2m_yseasmean_PI_cyc[2,:,:], norm=norm, cmap=cmap, transform=ccrs.PlateCarree())
axes[2].set_title('JJA', fontsize=16)
axes[2].coastlines()

cbar_ax = fig.add_axes([0.02, 0.2, 0.96, 0.05])
cbar=fig.colorbar(cs_world1, cax=cbar_ax, orientation="horizontal", ticks=levels)
cbar.ax.tick_params(labelsize=10)

cbar.set_label('$\mathsf{T_{2m}}$' + ' [\u00b0C]', size=18)
plt.subplots_adjust(top=0.92, bottom=0.29, left=0.02, right=0.98,
                    hspace=0.2, wspace=0.05)

plt.show()