
Load packages

In [1]:
%reload_ext watermark
%watermark -v -p numpy,matplotlib,pandas,sklearn,tqdm,tensorflow,rpy2,watermark,feature_engine
CPython 3.6.9
IPython 7.16.2

numpy 1.19.5
matplotlib 3.3.4
pandas 1.1.5
sklearn 0.24.2
tqdm 4.62.3
tensorflow 2.6.2
rpy2 3.4.5
watermark 2.0.2
feature_engine 1.2.0
In [2]:
from rpy2.robjects import pandas2ri
from rpy2.robjects import r
from rpy2.robjects.packages import importr
pandas2ri.activate()

utils = importr('utils')
package_names = ('ranger',)  # one-element tuple of R packages
utils.chooseCRANmirror(ind=1)
#utils.install_packages("ranger") # install the ranger package
Out[2]:
<rpy2.rinterface_lib.sexp.NULLType object at 0x7fcd1de59c08> [RTYPES.NILSXP]
In [17]:
import h2o
from h2o.automl import H2OAutoML
from h2o.estimators.gbm import H2OGradientBoostingEstimator
%matplotlib inline
h2o.no_progress()
In [18]:
import numpy as np
import pandas as pd
import os, re, cv2
from tqdm.auto import tqdm
import matplotlib.pyplot as plt
from feature_engine import transformation as vt
from sklearn.model_selection import train_test_split
import tensorflow as tf
from sklearn.preprocessing import MinMaxScaler
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID" 
os.environ["CUDA_VISIBLE_DEVICES"]="0"
main_dir="/root/data/dacon/open"

Define functions

In [19]:
# Contrast enhancement (CLAHE on the lightness channel)
def img_Contrast(img,clipLimit=3.0,tileGridSize=(8,8)):
    lab=cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
    l,a,b=cv2.split(lab)
    clahe=cv2.createCLAHE(clipLimit=clipLimit,
                          tileGridSize=tileGridSize)
    cl=clahe.apply(l)
    limg=cv2.merge((cl,a,b))
    final = cv2.cvtColor(limg,cv2.COLOR_LAB2BGR)
    return final

# Extract pixels whose HSV values fall within a given range
def img_extract(return_img,img,lower=(0,0,0), upper=(110,255,200)):
    img_hsv=cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    img_mask=cv2.inRange(img_hsv, lower, upper)
    img_result= cv2.bitwise_and(return_img, return_img, mask=img_mask)
    return img_result

# List image files in a directory (or list of directories)
def file_list(directory):
    def find_files(directory):
        return([f"{directory}/{i}" \
                for i in os.listdir(directory) if re.compile('png$|jpg$').findall(i)])
    out=list()
    if type(directory)==str:
        out=find_files(directory)
    elif type(directory)==list:
        for folder in range(len(directory)):
            [out.append(file) for file in find_files(directory[folder])]
    return(
        sorted(out))

# Per-channel mean and standard deviation of an image
def rgb_stat(img):
    r_m,g_m,b_m   =np.mean(img,axis=(0,1))
    r_sd,g_sd,b_sd= np.std(img,axis=(0,1))
    return r_m,g_m,b_m,r_sd,g_sd,b_sd

# Weight proxy from the count of non-black pixels
def img_to_weight(img,n=15000):
    return (cv2.cvtColor(img,cv2.COLOR_RGB2GRAY)!=0).sum()/n

# Build image-derived features for each file
def img_feature(dirs):
    df=pd.DataFrame({'img_dirs':dirs})
    for i in tqdm(range(len(dirs))):
        img=cv2.imread(dirs[i])
        raw_img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))

        # Extract withered leaves
        extract_img=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        df.loc[i,'del_leaf']=img_to_weight(extract_img,16900)

        # Extract the bok choy
        extract_img=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        df.loc[i,'pred_leaf1']=img_to_weight(extract_img,16900)
        contrast_img=img_Contrast(img, 3, tileGridSize=(5,5))
        extract_img=img_extract(img,contrast_img,upper=(77,255,130))# extracted from the original image
        df.loc[i,'pred_leaf2']=img_to_weight(extract_img,16900)
        df.loc[i,'pred_leaf_mean']=(df.loc[i,'pred_leaf1']+df.loc[i,'pred_leaf2'])/2

        # RGB statistics
        df.loc[i,["r_m","g_m","b_m","r_sd","g_sd","b_sd"]]=rgb_stat(extract_img)
    return df
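For reference, the helpers above can be exercised on a single image roughly as follows; the sample path is hypothetical, and the n=16900 scaling mirrors the calls inside img_feature.

sample = cv2.imread(f"{main_dir}/train/CASE01/image/CASE01_01.png")  # hypothetical sample path
enhanced = img_Contrast(sample, clipLimit=3.0, tileGridSize=(4, 3))  # CLAHE-enhanced copy
plants = img_extract(sample, enhanced, upper=(90, 255, 130))         # keep pixels whose enhanced HSV lies in range
print(img_to_weight(plants, 16900))                                  # non-black pixel count scaled to a weight proxy
print(rgb_stat(plants))                                              # per-channel means and standard deviations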

Build the dataset

In [20]:
tr_directory=[f"{main_dir}/train/CASE{i:02d}/image" for i in range(1,76)]
tr_img_dirs=file_list(tr_directory)
tr_img_dirs.remove(f'{main_dir}/train/CASE45/image/CASE45_17.png')
te_img_dirs=file_list(f"{main_dir}/test/image")
label_dfs=list()
for file in tqdm([f"{main_dir}/train/CASE{i:02d}/label.csv" for i in range(1,76)]):
    temp_df=pd.read_csv(file)
    for i, img_file in enumerate(temp_df.img_name):
        time_df=pd.read_csv(
            f"{main_dir}/train/CASE{img_file[4:6]}/meta/{img_file.replace('jpg','png').replace('png','csv')}")
        time_df=time_df.sort_values('시간')
        time=time_df['시간'].iloc[0]  # earliest timestamp among this image's meta records
        temp_df.loc[i,'date']=pd.to_datetime(time).date()
    label_dfs.append(temp_df)
label_df=pd.concat(label_dfs)

label_df['case']=[i[:6] for i in label_df['img_name']]
merge_df=label_df.copy()
merge_df.columns=['img_name','now_weight','date','case']
merge_df=merge_df.drop("img_name",axis=1)
merge_df['date']=merge_df.date+pd.to_timedelta(1,unit='day')
label_df=pd.merge(label_df,merge_df,how='left',on=['case','date'])
del merge_df
  0%|          | 0/75 [00:00<?, ?it/s]
In [21]:
if not('tr_df.csv' in os.listdir("/root/jupyter/데이콘/청경채/input/")):
    tr_df=img_feature(tr_img_dirs)
    te_df=img_feature(te_img_dirs)
    tr_df.to_csv('/root/jupyter/데이콘/청경채/input/tr_df.csv',index=False)
    te_df.to_csv('/root/jupyter/데이콘/청경채/input/te_df.csv',index=False)
else:
    tr_df=pd.read_csv('/root/jupyter/데이콘/청경채/input/tr_df.csv')
    te_df=pd.read_csv('/root/jupyter/데이콘/청경채/input/te_df.csv')
# Attach labels
for i in tqdm(range(tr_df.shape[0])):
    tr_df.loc[i,['leaf_weight','date','now_weight']]=label_df.loc[
        label_df.img_name==tr_df.img_dirs[i].split('/')[-1],['leaf_weight','date','now_weight']].values[0]
  0%|          | 0/1591 [00:00<?, ?it/s]

Data to remove

CASE 2_10, 2_11, 34_01, 40_01, 40_02, 44_01, 52_01, 56_01, 60_20~34, 63_01, 64_01: missing environmental data

CASE 8, 9, 22, 23, 26, 30, 31, 49, 59, 71, 72, 73: missing environmental data

CASE 35_01, 41_01, 44_02, 45_01, 52_02, 53_01, 56_02, 57_01, 63_02: partially missing (removed)

CASE 34, 35, 48: missing EC

CASE 32_15, 51_11: abnormal CO2

In [22]:
env_na_p=[f"CASE{i}" for i in set([f"60_{i}" for i in range(20,34+1)]).union(
    set(['02_10','02_11','34_01','40_01','40_02','44_01','52_01','56_01','63_01','64_01']))]
env_na_a=[f"CASE{i}" for i in ['08','09','22','23','26','30','31','49','59','71','72','73']]
partial_na=[f"CASE{i}" for i in ["35_01","41_01","44_02","45_01","52_02","53_01","57_01","63_02"]]
ec_na=[f"CASE{i}" for i in ["34","35","48"]]

tr_df['na_label']=False
for i in (env_na_p+env_na_a+partial_na+ec_na):
    tr_df.loc[tr_df['img_dirs'].str.contains(i),"na_label"]=True
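A quick, illustrative way to check how many training rows each group flags, and the overall count in na_label:

for name, cases in [('env_na_p', env_na_p), ('env_na_a', env_na_a),
                    ('partial_na', partial_na), ('ec_na', ec_na)]:
    print(name, tr_df['img_dirs'].str.contains('|'.join(cases)).sum())
print('total flagged:', tr_df['na_label'].sum())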
In [23]:
for i, filename in tqdm(enumerate(tr_df.img_dirs)):
    temp_df=pd.read_csv(filename.replace('image','meta').replace('jpg','png').replace('png','csv'))
    temp_df=temp_df.sort_values('시간')  # sort the meta records by time
    temp_df.시간=pd.to_datetime(temp_df.시간)
    # fill missing blue-light estimates from the other estimated light columns
    blue_na=temp_df['청색광추정광량'].isna()
    temp_df.loc[blue_na,'청색광추정광량']=(temp_df['총추정광량']-
                        temp_df['백색광추정광량']+temp_df['적색광추정광량'])[blue_na]

    aftn_co2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(9,19))),'CO2관측치'].quantile(.5)
    night_co2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(19,23))+list(range(0,5))),'CO2관측치'].quantile(.5)
    co2_ratio=aftn_co2/night_co2 # below 1: growth stage, above 1: germination stage
    zero_ec_cnt=sum(temp_df['EC관측치']==0)
    disease_signal=co2_ratio*zero_ec_cnt

    aftn_ec=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,15))),'EC관측치'].quantile(.5)
    night_ec1=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(22,23))),'EC관측치'].quantile(.5)
    night_ec2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(3,5))),'EC관측치'].quantile(.5)
    m_temp=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,18))),'내부온도관측치'].mean(skipna=True)
    m_humidity=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,15))),'내부습도관측치'].mean(skipna=True)    
    # Handle missing values
    if np.isnan(aftn_ec):
        if tr_df['del_leaf'][i]>10:
            aftn_ec=night_ec1*1.5
        else:
            aftn_ec=night_ec2
    if np.isnan(m_temp):
        m_temp=temp_df['내부온도관측치'].mean(skipna=True)
    if np.isnan(m_humidity):
        m_humidity=temp_df['내부습도관측치'].mean(skipna=True)

    if np.isnan(night_ec1):
        night_ec1=0
    if np.isnan(co2_ratio):
        if tr_df.pred_leaf_mean[i]>50:
            co2_ratio = 0.5
        else:
            co2_ratio = 1.5
    disease_signal=co2_ratio*zero_ec_cnt
    tr_df.loc[i,['co2_ratio','zero_ec_cnt','disease_signal',
                 'aftn_ec','night_ec1','night_ec2','m_temp','m_humidity']]=\
        co2_ratio, zero_ec_cnt, disease_signal, aftn_ec, night_ec1, night_ec2, m_temp, m_humidity
0it [00:00, ?it/s]
In [24]:
for i, filename in tqdm(enumerate(te_df.img_dirs)):
    temp_df=pd.read_csv(filename.replace('image','meta').replace('jpg','png').replace('png','csv'))
    temp_df=temp_df.sort_values('시간')  # sort the meta records by time
    temp_df.시간=pd.to_datetime(temp_df.시간)
    # fill missing blue-light estimates from the other estimated light columns
    blue_na=temp_df['청색광추정광량'].isna()
    temp_df.loc[blue_na,'청색광추정광량']=(temp_df['총추정광량']-
                        temp_df['백색광추정광량']+temp_df['적색광추정광량'])[blue_na]

    aftn_co2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(9,19))),'CO2관측치'].quantile(.5)
    night_co2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(19,23))+list(range(0,5))),'CO2관측치'].quantile(.5)
    co2_ratio=aftn_co2/night_co2 # below 1: growth stage, above 1: germination stage
    zero_ec_cnt=sum(temp_df['EC관측치']==0)


    aftn_ec=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,15))),'EC관측치'].quantile(.5)
    night_ec1=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(22,23))),'EC관측치'].quantile(.5)
    night_ec2=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(3,5))),'EC관측치'].quantile(.5)
    m_temp=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,18))),'내부온도관측치'].mean(skipna=True)
    m_humidity=temp_df.loc[temp_df.시간.dt.hour.isin(list(range(10,15))),'내부습도관측치'].mean(skipna=True)
    # Handle missing values
    if np.isnan(aftn_ec):
        if te_df['del_leaf'][i]>10:
            aftn_ec=night_ec1*1.5
        else:
            aftn_ec=night_ec2
    if np.isnan(m_temp):
        m_temp=temp_df['내부온도관측치'].mean(skipna=True)
    if np.isnan(m_humidity):
        m_humidity=temp_df['내부습도관측치'].mean(skipna=True)

    if np.isnan(night_ec1):
        night_ec1=0
    if np.isnan(co2_ratio):
        if te_df.pred_leaf_mean[i]>50:
            co2_ratio = 0.5
        else:
            co2_ratio = 1.5
    disease_signal=co2_ratio*zero_ec_cnt

    te_df.loc[i,['co2_ratio','zero_ec_cnt','disease_signal',
                 'aftn_ec','night_ec1','night_ec2','m_temp','m_humidity']]=\
        co2_ratio, zero_ec_cnt, disease_signal, aftn_ec, night_ec1, night_ec2, m_temp, m_humidity
0it [00:00, ?it/s]

Bok choy weight estimation

In [25]:
df=tr_df[['img_dirs','now_weight']].dropna().reset_index()
train,valid=train_test_split(df, test_size=0.33, random_state=42)
In [39]:
df[['img_dirs','now_weight']].isna().sum()
Out[39]:
img_dirs      0
now_weight    0
dtype: int64
In [40]:
def tr_gen():
    df=train[['img_dirs','now_weight']].dropna().reset_index()
    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.now_weight[i])
def va_gen():
    df=valid[['img_dirs','now_weight']].dropna().reset_index()
    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.now_weight[i])
def check_gen():
    df=tr_df[['img_dirs','now_weight']].dropna().reset_index()
    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.now_weight[i])

def te_gen():
    df=te_df[['img_dirs']].reset_index()
    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, np.nan)
def NMAE(true, pred):
    mae = np.mean(np.abs(true-pred))
    score = mae / np.mean(np.abs(true))
    return score

def nmae_keras(y_true, y_pred):
    score = tf.py_function(func=NMAE, inp=[y_true, y_pred], Tout=tf.float32,  name='name')
    return score
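NMAE here is the mean absolute error divided by the mean absolute target. A toy check with made-up numbers:

true = np.array([100., 120., 80.])
pred = np.array([ 90., 130., 85.])
print(NMAE(true, pred))  # mean(|error|)/mean(|true|) = 8.33/100 ≈ 0.083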
In [13]:
tr_data=tf.data.Dataset.from_generator(tr_gen,(tf.float32,tf.float32))
tr_data=tr_data.cache().batch(24).prefetch(buffer_size=10)
va_data=tf.data.Dataset.from_generator(va_gen,(tf.float32,tf.float32))
va_data=va_data.cache().batch(24).prefetch(buffer_size=10)
te_data=tf.data.Dataset.from_generator(te_gen,(tf.float32,tf.float32))
te_data=te_data.cache().batch(24).prefetch(buffer_size=10)
ch_data=tf.data.Dataset.from_generator(check_gen,(tf.float32,tf.float32))
ch_data=ch_data.cache().batch(24).prefetch(buffer_size=10)
In [14]:
next(iter(te_data))[1]
Out[14]:
<tf.Tensor: shape=(24,), dtype=float32, numpy=
array([nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan,
       nan, nan, nan, nan, nan, nan, nan, nan, nan, nan, nan],
      dtype=float32)>
In [15]:
if not('now_weight_best_model_v1.h5' in os.listdir("/root/jupyter/데이콘/청경채/output/")):
    tf.random.set_seed(42)
    inp = tf.keras.Input(shape=(820, 616, 3),dtype=tf.float32)
    conv_1=tf.keras.layers.Conv2D(16,kernel_size=1, activation='LeakyReLU')(inp)
    avg_1=tf.keras.layers.AveragePooling2D()(conv_1)
    conv_2=tf.keras.layers.Conv2D(64,kernel_size=1, activation='LeakyReLU')(avg_1)
    avg_2=tf.keras.layers.AveragePooling2D()(conv_2)
    conv_3=tf.keras.layers.Conv2D(32,kernel_size=1, activation='LeakyReLU')(avg_2)
    avg_3=tf.keras.layers.AveragePooling2D()(conv_3)
    conv_4=tf.keras.layers.Conv2D(8,kernel_size=1, activation='LeakyReLU')(avg_3)
    avg_4=tf.keras.layers.AveragePooling2D()(conv_4)
    flat=tf.keras.layers.Flatten()(avg_4)
    dense_1=tf.keras.layers.Dense(64,activation='ReLU')(flat)
    dense_2=tf.keras.layers.Dense(32,activation='LeakyReLU')(dense_1)
    out=tf.keras.layers.Dense(1,activation='LeakyReLU')(dense_2)
    model = tf.keras.Model(inp, out)
    early = tf.keras.callbacks.EarlyStopping(
        monitor='val_loss',mode="min", patience=10)
    lr_reduce=tf.keras.callbacks.ReduceLROnPlateau(
        monitor='val_loss',patience=3,verbose=1,min_delta=0.001)
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
                  loss='mse',metrics=['mae'])
    model.fit(tr_data,verbose=1,callbacks =[early,lr_reduce],
              epochs=50,validation_data=va_data)

    # fine tuning
    for i in model.layers[0:-4]:
        i.trainable=False
    cp_callback = tf.keras.callbacks.ModelCheckpoint(
        filepath='/root/jupyter/데이콘/청경채/model2/now_weight_{val_loss:.2f}.h5',
        monitor='val_loss',mode='min',verbose=1)
    early = tf.keras.callbacks.EarlyStopping(monitor='val_loss',mode="min", patience=50)
    lr_reduce=tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',patience=10,verbose=1)
    tf.random.set_seed(42)
    model.fit(tr_data,verbose=1,callbacks =[early,lr_reduce,cp_callback],
              epochs=300,validation_data=va_data)

    file_loss=min([i.split('/')[-1].split('_')[-1].replace('.h5','')
                   for i in os.listdir('/root/jupyter/데이콘/청경채/model2/')],
                  key=float)  # numeric minimum of the saved val_loss values
    model=tf.keras.models.load_model(f'/root/jupyter/데이콘/청경채/model2/now_weight_{file_loss}.h5')
    tf.keras.models.save_model(model,f'/root/jupyter/데이콘/청경채/output/now_weight_best_model_v1.h5')
else:
    model=tf.keras.models.load_model(f'/root/jupyter/데이콘/청경채/output/now_weight_best_model_v1.h5')
In [16]:
tr_df.loc[~tr_df.now_weight.isna(),'cnn_now_weight']=model.predict(ch_data)
te_df['cnn_now_weight']=model.predict(te_data)
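As a rough, illustrative gauge of the CNN's current-weight fit, the NMAE function defined earlier can be reused on the labeled training rows:

mask = ~tr_df.now_weight.isna()
print(NMAE(tr_df.loc[mask, 'now_weight'].values,
           tr_df.loc[mask, 'cnn_now_weight'].values))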
In [17]:
plt.scatter(tr_df.cnn_now_weight,tr_df.now_weight)
Out[17]:
<matplotlib.collections.PathCollection at 0x7fcbd0418438>
In [18]:
plt.scatter(tr_df.pred_leaf_mean,tr_df.leaf_weight)
Out[18]:
<matplotlib.collections.PathCollection at 0x7fcbd036ccc0>
In [41]:
tr_df['increase']=((tr_df['leaf_weight']-tr_df['now_weight'])>0).astype('int')
tr_df.loc[tr_df['now_weight'].isna(),'increase']=np.nan

df=tr_df[['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
          'disease_signal','aftn_ec','night_ec1','night_ec2',
          'pred_leaf_mean','pred_leaf1','pred_leaf2','increase','leaf_weight','img_dirs']
        ].dropna().reset_index()

train, valid=train_test_split(df, test_size=0.33, random_state=42)
scaler = MinMaxScaler()
scaler.fit(
    tr_df[['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']].dropna().reset_index()[
        ['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']])
Out[41]:
MinMaxScaler()
In [20]:
def tr_gen():
    df=train.dropna().reset_index()
    inp_cols=['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']
    class_cols=['del_leaf','r_m','g_m','b_m','co2_ratio', 
                'zero_ec_cnt','disease_signal','aftn_ec','night_ec1','night_ec2']
    reg_cols=['del_leaf','pred_leaf_mean','pred_leaf1','pred_leaf2','co2_ratio','disease_signal']
    df[inp_cols]=scaler.transform(df[inp_cols])

    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.loc[i,class_cols], df.loc[i,reg_cols]), (df.increase[i], df.leaf_weight[i])

def va_gen():
    df=valid.dropna().reset_index()
    inp_cols=['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']
    class_cols=['del_leaf','r_m','g_m','b_m','co2_ratio', 
                'zero_ec_cnt','disease_signal','aftn_ec','night_ec1','night_ec2']
    reg_cols=['del_leaf','pred_leaf_mean','pred_leaf1','pred_leaf2','co2_ratio','disease_signal']
    df[inp_cols]=scaler.transform(df[inp_cols])

    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.loc[i,class_cols], df.loc[i,reg_cols]), (df.increase[i], df.leaf_weight[i])

def check_gen():
    df=tr_df[['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
          'disease_signal','aftn_ec','night_ec1','night_ec2',
          'pred_leaf_mean','pred_leaf1','pred_leaf2','increase','leaf_weight','img_dirs']
        ].dropna().reset_index()
    inp_cols=['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']
    class_cols=['del_leaf','r_m','g_m','b_m','co2_ratio', 
                'zero_ec_cnt','disease_signal','aftn_ec','night_ec1','night_ec2']
    reg_cols=['del_leaf','pred_leaf_mean','pred_leaf1','pred_leaf2','co2_ratio','disease_signal']
    df[inp_cols]=scaler.transform(df[inp_cols])

    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.loc[i,class_cols], df.loc[i,reg_cols]), (df.increase[i], df.leaf_weight[i])

def te_gen():
    df=te_df[['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
          'disease_signal','aftn_ec','night_ec1','night_ec2',
          'pred_leaf_mean','pred_leaf1','pred_leaf2','img_dirs']
        ].dropna().reset_index()
    inp_cols=['del_leaf','r_m','g_m','b_m','co2_ratio','zero_ec_cnt',
                   'disease_signal','aftn_ec','night_ec1','night_ec2',
                   'pred_leaf_mean','pred_leaf1','pred_leaf2']
    class_cols=['del_leaf','r_m','g_m','b_m','co2_ratio', 
                'zero_ec_cnt','disease_signal','aftn_ec','night_ec1','night_ec2']
    reg_cols=['del_leaf','pred_leaf_mean','pred_leaf1','pred_leaf2','co2_ratio','disease_signal']
    df[inp_cols]=scaler.transform(df[inp_cols])

    for i in range(df.shape[0]):
        img=cv2.imread(df.img_dirs[i])
        img=cv2.resize(img,dsize=(int(img.shape[0]/4),int(img.shape[1]/4)),interpolation=cv2.INTER_CUBIC)
        contrast_img=img_Contrast(img, 3, tileGridSize=(4,3))
        extract_img1=img_extract(img,contrast_img, lower=(10,0,0), upper=(30,255,255))
        extract_img2=img_extract(img,contrast_img,upper=(90,255,130))# extracted from the original image
        extract_img=cv2.addWeighted(extract_img1,1,extract_img2,1,1)
        yield (extract_img, df.loc[i,class_cols], df.loc[i,reg_cols]), (np.nan, np.nan)
In [21]:
tr_data=tf.data.Dataset.from_generator(tr_gen,((tf.float32,tf.float32,tf.float32),(tf.float32,tf.float32)))
tr_data=tr_data.cache().batch(24).prefetch(buffer_size=10)
va_data=tf.data.Dataset.from_generator(va_gen,((tf.float32,tf.float32,tf.float32),(tf.float32,tf.float32)))
va_data=va_data.cache().batch(24).prefetch(buffer_size=10)
te_data=tf.data.Dataset.from_generator(te_gen,((tf.float32,tf.float32,tf.float32),(tf.float32,tf.float32)))
te_data=te_data.cache().batch(24).prefetch(buffer_size=10)
ch_data=tf.data.Dataset.from_generator(check_gen,((tf.float32,tf.float32,tf.float32),(tf.float32,tf.float32)))
ch_data=ch_data.cache().batch(24).prefetch(buffer_size=10)
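Pulling one validation batch confirms the element structure of the multi-input / multi-output pipeline; the shapes noted below assume the same 1/4 resize as in the first model.

(x_img, x_cls, x_reg), (y_inc, y_wgt) = next(iter(va_data))
print(x_img.shape, x_cls.shape, x_reg.shape, y_inc.shape, y_wgt.shape)
# e.g. (24, 820, 616, 3) (24, 10) (24, 6) (24,) (24,)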
In [36]:
tf.random.set_seed(42)
now_weight_model=tf.keras.models.load_model(f'/root/jupyter/데이콘/청경채/output/now_weight_best_model_v1.h5')

inp1 = tf.keras.Input(shape=10,dtype=tf.float32,name='inp1')
concat1_1=tf.keras.layers.concatenate([now_weight_model.layers[-2].output,inp1])
dense1_1=tf.keras.layers.Dense(64,activation='ReLU')(concat1_1)
dense1_2=tf.keras.layers.Dense(32,activation='ReLU')(dense1_1)
out1=tf.keras.layers.Dense(1,name="cat_output",activation='sigmoid')(dense1_2)
inp2 = tf.keras.Input(shape=6,dtype=tf.float32,name='inp2')
concat2_1=tf.keras.layers.concatenate([dense1_2,now_weight_model.layers[-1].output,inp2])
dense2_1=tf.keras.layers.Dense(64)(concat2_1)
dense2_2=tf.keras.layers.Dense(32)(dense2_1)
out2=tf.keras.layers.Dense(1,name="reg_output")(dense2_2)
forecast_model=tf.keras.Model(inputs=(now_weight_model.input,inp1,inp2),outputs=(out1,out2))
# tf.keras.utils.plot_model(forecast_model,'/root/jupyter/데이콘/청경채/output/model.png',
#                           show_shapes=True,show_layer_names=True)
cp_callback = tf.keras.callbacks.ModelCheckpoint(
    filepath='/root/jupyter/데이콘/청경채/model2/forecast_weight_{val_loss:.2f}.h5',
    monitor='val_loss',mode='auto',verbose=1)
early = tf.keras.callbacks.EarlyStopping(monitor='val_loss',mode="auto", patience=50)
lr_reduce=tf.keras.callbacks.ReduceLROnPlateau(monitor='val_loss',patience=10,verbose=1)

forecast_model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
              loss={"cat_output":"binary_crossentropy","reg_output":'mse'},
              metrics=['mae'])

forecast_model.fit(tr_data,verbose=1,callbacks =[early,lr_reduce,cp_callback],
          epochs=300,validation_data=va_data)
file_loss=min([i.split('/')[-1].split('_')[-1].replace('.h5','')
               for i in os.listdir('/root/jupyter/데이콘/청경채/model2/') if re.compile('forecast').findall(i)],
              key=float)  # numeric minimum of the saved val_loss values
forecast_model=tf.keras.models.load_model(f'/root/jupyter/데이콘/청경채/model2/forecast_weight_{file_loss}.h5')
tf.keras.models.save_model(forecast_model,f'/root/jupyter/데이콘/청경채/output/forecast_weight_best_model_v2.h5')
Epoch 1/300
35/35 [==============================] - 131s 4s/step - loss: 15622.3193 - cat_output_loss: 5.6683 - reg_output_loss: 15616.6523 - cat_output_mae: 0.5410 - reg_output_mae: 51.4487 - val_loss: 2787.8098 - val_cat_output_loss: 0.6038 - val_reg_output_loss: 2787.2058 - val_cat_output_mae: 0.0414 - val_reg_output_mae: 30.9375

Epoch 00001: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_2787.81.h5
Epoch 2/300
35/35 [==============================] - 10s 286ms/step - loss: 454.0919 - cat_output_loss: 1.1754 - reg_output_loss: 452.9165 - cat_output_mae: 0.0852 - reg_output_mae: 12.2682 - val_loss: 188.8464 - val_cat_output_loss: 0.6103 - val_reg_output_loss: 188.2361 - val_cat_output_mae: 0.0396 - val_reg_output_mae: 8.6170

Epoch 00002: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_188.85.h5
Epoch 3/300
35/35 [==============================] - 10s 287ms/step - loss: 434.3591 - cat_output_loss: 1.1552 - reg_output_loss: 433.2038 - cat_output_mae: 0.0889 - reg_output_mae: 11.9799 - val_loss: 253.1367 - val_cat_output_loss: 0.6110 - val_reg_output_loss: 252.5257 - val_cat_output_mae: 0.0509 - val_reg_output_mae: 9.2168

Epoch 00003: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_253.14.h5
Epoch 4/300
35/35 [==============================] - 10s 286ms/step - loss: 312.9943 - cat_output_loss: 1.1708 - reg_output_loss: 311.8234 - cat_output_mae: 0.0859 - reg_output_mae: 10.3695 - val_loss: 195.3712 - val_cat_output_loss: 0.6061 - val_reg_output_loss: 194.7652 - val_cat_output_mae: 0.0408 - val_reg_output_mae: 8.2103

Epoch 00004: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_195.37.h5
Epoch 5/300
35/35 [==============================] - 10s 288ms/step - loss: 538.1053 - cat_output_loss: 1.0161 - reg_output_loss: 537.0892 - cat_output_mae: 0.0963 - reg_output_mae: 13.3217 - val_loss: 240.7515 - val_cat_output_loss: 0.5468 - val_reg_output_loss: 240.2048 - val_cat_output_mae: 0.1164 - val_reg_output_mae: 9.1587

Epoch 00005: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_240.75.h5
Epoch 6/300
35/35 [==============================] - 10s 287ms/step - loss: 606.0801 - cat_output_loss: 0.9097 - reg_output_loss: 605.1703 - cat_output_mae: 0.1567 - reg_output_mae: 13.7852 - val_loss: 190.9751 - val_cat_output_loss: 0.6209 - val_reg_output_loss: 190.3542 - val_cat_output_mae: 0.1838 - val_reg_output_mae: 8.7099

Epoch 00006: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_190.98.h5
Epoch 7/300
35/35 [==============================] - 10s 288ms/step - loss: 306.6978 - cat_output_loss: 0.7442 - reg_output_loss: 305.9536 - cat_output_mae: 0.1572 - reg_output_mae: 9.9669 - val_loss: 153.0561 - val_cat_output_loss: 0.4063 - val_reg_output_loss: 152.6498 - val_cat_output_mae: 0.0839 - val_reg_output_mae: 7.8254

Epoch 00007: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_153.06.h5
Epoch 8/300
35/35 [==============================] - 10s 287ms/step - loss: 340.4021 - cat_output_loss: 1.1925 - reg_output_loss: 339.2096 - cat_output_mae: 0.1307 - reg_output_mae: 9.5173 - val_loss: 214.7084 - val_cat_output_loss: 0.6065 - val_reg_output_loss: 214.1019 - val_cat_output_mae: 0.0454 - val_reg_output_mae: 8.8052

Epoch 00008: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_214.71.h5
Epoch 9/300
35/35 [==============================] - 10s 287ms/step - loss: 240.0244 - cat_output_loss: 1.1588 - reg_output_loss: 238.8655 - cat_output_mae: 0.0941 - reg_output_mae: 8.6439 - val_loss: 177.5941 - val_cat_output_loss: 0.6068 - val_reg_output_loss: 176.9873 - val_cat_output_mae: 0.0462 - val_reg_output_mae: 7.9171

Epoch 00009: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_177.59.h5
Epoch 10/300
35/35 [==============================] - 10s 287ms/step - loss: 245.4752 - cat_output_loss: 1.1555 - reg_output_loss: 244.3197 - cat_output_mae: 0.0912 - reg_output_mae: 8.7353 - val_loss: 175.4599 - val_cat_output_loss: 0.6075 - val_reg_output_loss: 174.8524 - val_cat_output_mae: 0.0469 - val_reg_output_mae: 7.6198

Epoch 00010: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_175.46.h5
Epoch 11/300
35/35 [==============================] - 10s 288ms/step - loss: 375.7660 - cat_output_loss: 1.1563 - reg_output_loss: 374.6097 - cat_output_mae: 0.0908 - reg_output_mae: 10.9295 - val_loss: 145.7719 - val_cat_output_loss: 0.6095 - val_reg_output_loss: 145.1624 - val_cat_output_mae: 0.0485 - val_reg_output_mae: 6.5724

Epoch 00011: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_145.77.h5
Epoch 12/300
35/35 [==============================] - 10s 288ms/step - loss: 748.5430 - cat_output_loss: 1.1181 - reg_output_loss: 747.4247 - cat_output_mae: 0.0939 - reg_output_mae: 14.8082 - val_loss: 117.7116 - val_cat_output_loss: 0.5540 - val_reg_output_loss: 117.1576 - val_cat_output_mae: 0.0551 - val_reg_output_mae: 6.6494

Epoch 00012: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_117.71.h5
Epoch 13/300
35/35 [==============================] - 10s 287ms/step - loss: 102.2484 - cat_output_loss: 0.8162 - reg_output_loss: 101.4322 - cat_output_mae: 0.1117 - reg_output_mae: 5.2651 - val_loss: 86.8546 - val_cat_output_loss: 0.4597 - val_reg_output_loss: 86.3949 - val_cat_output_mae: 0.1128 - val_reg_output_mae: 5.1356

Epoch 00013: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_86.85.h5
Epoch 14/300
35/35 [==============================] - 10s 287ms/step - loss: 62.6305 - cat_output_loss: 0.6410 - reg_output_loss: 61.9895 - cat_output_mae: 0.1125 - reg_output_mae: 3.9578 - val_loss: 80.5980 - val_cat_output_loss: 0.3844 - val_reg_output_loss: 80.2136 - val_cat_output_mae: 0.0693 - val_reg_output_mae: 4.8777

Epoch 00014: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_80.60.h5
Epoch 15/300
35/35 [==============================] - 10s 288ms/step - loss: 59.5084 - cat_output_loss: 0.6093 - reg_output_loss: 58.8991 - cat_output_mae: 0.1076 - reg_output_mae: 4.0005 - val_loss: 125.7872 - val_cat_output_loss: 0.3837 - val_reg_output_loss: 125.4035 - val_cat_output_mae: 0.0748 - val_reg_output_mae: 5.4330

Epoch 00015: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_125.79.h5
Epoch 16/300
35/35 [==============================] - 10s 287ms/step - loss: 83.7078 - cat_output_loss: 0.6789 - reg_output_loss: 83.0289 - cat_output_mae: 0.1217 - reg_output_mae: 4.6370 - val_loss: 92.6045 - val_cat_output_loss: 0.4375 - val_reg_output_loss: 92.1670 - val_cat_output_mae: 0.0906 - val_reg_output_mae: 5.1628

Epoch 00016: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_92.60.h5
Epoch 17/300
35/35 [==============================] - 10s 287ms/step - loss: 92.8448 - cat_output_loss: 0.6621 - reg_output_loss: 92.1827 - cat_output_mae: 0.1159 - reg_output_mae: 5.1885 - val_loss: 80.0940 - val_cat_output_loss: 0.4370 - val_reg_output_loss: 79.6571 - val_cat_output_mae: 0.0961 - val_reg_output_mae: 4.9087

Epoch 00017: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_80.09.h5
Epoch 18/300
35/35 [==============================] - 10s 287ms/step - loss: 89.8239 - cat_output_loss: 0.6414 - reg_output_loss: 89.1825 - cat_output_mae: 0.1177 - reg_output_mae: 5.1628 - val_loss: 87.0075 - val_cat_output_loss: 0.4209 - val_reg_output_loss: 86.5866 - val_cat_output_mae: 0.0884 - val_reg_output_mae: 5.0069

Epoch 00018: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_87.01.h5
Epoch 19/300
35/35 [==============================] - 10s 289ms/step - loss: 102.3396 - cat_output_loss: 0.6277 - reg_output_loss: 101.7119 - cat_output_mae: 0.1178 - reg_output_mae: 5.5687 - val_loss: 99.7375 - val_cat_output_loss: 0.4115 - val_reg_output_loss: 99.3259 - val_cat_output_mae: 0.0861 - val_reg_output_mae: 5.2408

Epoch 00019: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_99.74.h5
Epoch 20/300
35/35 [==============================] - 10s 288ms/step - loss: 139.5957 - cat_output_loss: 0.6230 - reg_output_loss: 138.9727 - cat_output_mae: 0.1176 - reg_output_mae: 6.4522 - val_loss: 115.6146 - val_cat_output_loss: 0.4160 - val_reg_output_loss: 115.1986 - val_cat_output_mae: 0.0896 - val_reg_output_mae: 5.5988

Epoch 00020: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_115.61.h5
Epoch 21/300
35/35 [==============================] - 10s 288ms/step - loss: 138.6861 - cat_output_loss: 0.6211 - reg_output_loss: 138.0650 - cat_output_mae: 0.1188 - reg_output_mae: 6.4677 - val_loss: 161.1158 - val_cat_output_loss: 0.4270 - val_reg_output_loss: 160.6888 - val_cat_output_mae: 0.0942 - val_reg_output_mae: 6.9700

Epoch 00021: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_161.12.h5
Epoch 22/300
35/35 [==============================] - 10s 286ms/step - loss: 122.4462 - cat_output_loss: 0.6234 - reg_output_loss: 121.8228 - cat_output_mae: 0.1191 - reg_output_mae: 6.0842 - val_loss: 222.7249 - val_cat_output_loss: 0.4140 - val_reg_output_loss: 222.3109 - val_cat_output_mae: 0.0885 - val_reg_output_mae: 8.5638

Epoch 00022: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_222.72.h5
Epoch 23/300
35/35 [==============================] - 10s 288ms/step - loss: 107.4398 - cat_output_loss: 0.6241 - reg_output_loss: 106.8157 - cat_output_mae: 0.1201 - reg_output_mae: 5.5803 - val_loss: 268.4381 - val_cat_output_loss: 0.4122 - val_reg_output_loss: 268.0260 - val_cat_output_mae: 0.0885 - val_reg_output_mae: 9.5321

Epoch 00023: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_268.44.h5
Epoch 24/300
35/35 [==============================] - 10s 288ms/step - loss: 112.8209 - cat_output_loss: 0.6305 - reg_output_loss: 112.1904 - cat_output_mae: 0.1210 - reg_output_mae: 5.7446 - val_loss: 306.0424 - val_cat_output_loss: 0.3809 - val_reg_output_loss: 305.6615 - val_cat_output_mae: 0.0850 - val_reg_output_mae: 10.1628

Epoch 00024: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_306.04.h5
Epoch 25/300
35/35 [==============================] - 10s 287ms/step - loss: 169.3772 - cat_output_loss: 0.6350 - reg_output_loss: 168.7422 - cat_output_mae: 0.1234 - reg_output_mae: 7.2794 - val_loss: 301.2572 - val_cat_output_loss: 0.3896 - val_reg_output_loss: 300.8676 - val_cat_output_mae: 0.0883 - val_reg_output_mae: 9.8870

Epoch 00025: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_301.26.h5
Epoch 26/300
35/35 [==============================] - 10s 287ms/step - loss: 320.2744 - cat_output_loss: 0.6403 - reg_output_loss: 319.6342 - cat_output_mae: 0.1237 - reg_output_mae: 10.1972 - val_loss: 185.9016 - val_cat_output_loss: 0.3856 - val_reg_output_loss: 185.5161 - val_cat_output_mae: 0.0825 - val_reg_output_mae: 7.4837

Epoch 00026: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_185.90.h5
Epoch 27/300
35/35 [==============================] - 10s 285ms/step - loss: 686.6797 - cat_output_loss: 0.7176 - reg_output_loss: 685.9621 - cat_output_mae: 0.1212 - reg_output_mae: 14.8531 - val_loss: 419.0114 - val_cat_output_loss: 0.5309 - val_reg_output_loss: 418.4806 - val_cat_output_mae: 0.1141 - val_reg_output_mae: 12.7542

Epoch 00027: ReduceLROnPlateau reducing learning rate to 0.0009999999776482583.

Epoch 00027: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_419.01.h5
Epoch 28/300
35/35 [==============================] - 10s 287ms/step - loss: 147.4339 - cat_output_loss: 0.6802 - reg_output_loss: 146.7538 - cat_output_mae: 0.1350 - reg_output_mae: 6.1860 - val_loss: 135.6167 - val_cat_output_loss: 0.3913 - val_reg_output_loss: 135.2254 - val_cat_output_mae: 0.0731 - val_reg_output_mae: 6.1296

Epoch 00028: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_135.62.h5
Epoch 29/300
35/35 [==============================] - 10s 287ms/step - loss: 70.5416 - cat_output_loss: 0.6356 - reg_output_loss: 69.9060 - cat_output_mae: 0.1192 - reg_output_mae: 4.3061 - val_loss: 112.3320 - val_cat_output_loss: 0.3881 - val_reg_output_loss: 111.9439 - val_cat_output_mae: 0.0687 - val_reg_output_mae: 5.5441

Epoch 00029: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_112.33.h5
Epoch 30/300
35/35 [==============================] - 10s 288ms/step - loss: 56.8927 - cat_output_loss: 0.6371 - reg_output_loss: 56.2556 - cat_output_mae: 0.1174 - reg_output_mae: 3.9025 - val_loss: 99.7667 - val_cat_output_loss: 0.3961 - val_reg_output_loss: 99.3706 - val_cat_output_mae: 0.0763 - val_reg_output_mae: 5.2258

Epoch 00030: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_99.77.h5
Epoch 31/300
35/35 [==============================] - 10s 287ms/step - loss: 50.3473 - cat_output_loss: 0.6371 - reg_output_loss: 49.7102 - cat_output_mae: 0.1195 - reg_output_mae: 3.7156 - val_loss: 91.8087 - val_cat_output_loss: 0.3932 - val_reg_output_loss: 91.4155 - val_cat_output_mae: 0.0738 - val_reg_output_mae: 4.9970

Epoch 00031: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_91.81.h5
Epoch 32/300
35/35 [==============================] - 10s 287ms/step - loss: 45.9512 - cat_output_loss: 0.6351 - reg_output_loss: 45.3161 - cat_output_mae: 0.1180 - reg_output_mae: 3.5742 - val_loss: 86.7626 - val_cat_output_loss: 0.3933 - val_reg_output_loss: 86.3693 - val_cat_output_mae: 0.0741 - val_reg_output_mae: 4.8487

Epoch 00032: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_86.76.h5
Epoch 33/300
35/35 [==============================] - 10s 287ms/step - loss: 42.6498 - cat_output_loss: 0.6324 - reg_output_loss: 42.0174 - cat_output_mae: 0.1177 - reg_output_mae: 3.4482 - val_loss: 83.2526 - val_cat_output_loss: 0.3929 - val_reg_output_loss: 82.8597 - val_cat_output_mae: 0.0745 - val_reg_output_mae: 4.7450

Epoch 00033: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_83.25.h5
Epoch 34/300
35/35 [==============================] - 10s 287ms/step - loss: 39.9209 - cat_output_loss: 0.6290 - reg_output_loss: 39.2920 - cat_output_mae: 0.1174 - reg_output_mae: 3.3375 - val_loss: 80.7583 - val_cat_output_loss: 0.3915 - val_reg_output_loss: 80.3669 - val_cat_output_mae: 0.0738 - val_reg_output_mae: 4.6599

Epoch 00034: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_80.76.h5
Epoch 35/300
35/35 [==============================] - 10s 287ms/step - loss: 37.5586 - cat_output_loss: 0.6256 - reg_output_loss: 36.9330 - cat_output_mae: 0.1161 - reg_output_mae: 3.2371 - val_loss: 78.6888 - val_cat_output_loss: 0.3909 - val_reg_output_loss: 78.2979 - val_cat_output_mae: 0.0735 - val_reg_output_mae: 4.5909

Epoch 00035: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_78.69.h5
Epoch 36/300
35/35 [==============================] - 10s 287ms/step - loss: 35.6287 - cat_output_loss: 0.6218 - reg_output_loss: 35.0069 - cat_output_mae: 0.1154 - reg_output_mae: 3.1530 - val_loss: 77.0620 - val_cat_output_loss: 0.3907 - val_reg_output_loss: 76.6713 - val_cat_output_mae: 0.0736 - val_reg_output_mae: 4.5304

Epoch 00036: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_77.06.h5
Epoch 37/300
35/35 [==============================] - 10s 287ms/step - loss: 33.7930 - cat_output_loss: 0.6182 - reg_output_loss: 33.1748 - cat_output_mae: 0.1146 - reg_output_mae: 3.0687 - val_loss: 75.7244 - val_cat_output_loss: 0.3907 - val_reg_output_loss: 75.3337 - val_cat_output_mae: 0.0738 - val_reg_output_mae: 4.4781

Epoch 00037: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_75.72.h5
Epoch 38/300
35/35 [==============================] - 10s 288ms/step - loss: 32.1051 - cat_output_loss: 0.6159 - reg_output_loss: 31.4892 - cat_output_mae: 0.1137 - reg_output_mae: 2.9943 - val_loss: 74.5813 - val_cat_output_loss: 0.3903 - val_reg_output_loss: 74.1911 - val_cat_output_mae: 0.0733 - val_reg_output_mae: 4.4308

Epoch 00038: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_74.58.h5
Epoch 39/300
35/35 [==============================] - 10s 287ms/step - loss: 30.6023 - cat_output_loss: 0.6127 - reg_output_loss: 29.9897 - cat_output_mae: 0.1130 - reg_output_mae: 2.9267 - val_loss: 73.6399 - val_cat_output_loss: 0.3902 - val_reg_output_loss: 73.2497 - val_cat_output_mae: 0.0739 - val_reg_output_mae: 4.3880

Epoch 00039: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_73.64.h5
Epoch 40/300
35/35 [==============================] - 10s 288ms/step - loss: 29.2284 - cat_output_loss: 0.6096 - reg_output_loss: 28.6188 - cat_output_mae: 0.1125 - reg_output_mae: 2.8643 - val_loss: 72.8553 - val_cat_output_loss: 0.3888 - val_reg_output_loss: 72.4665 - val_cat_output_mae: 0.0729 - val_reg_output_mae: 4.3489

Epoch 00040: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_72.86.h5
Epoch 41/300
35/35 [==============================] - 10s 286ms/step - loss: 27.8717 - cat_output_loss: 0.6073 - reg_output_loss: 27.2644 - cat_output_mae: 0.1115 - reg_output_mae: 2.8017 - val_loss: 72.3286 - val_cat_output_loss: 0.3890 - val_reg_output_loss: 71.9396 - val_cat_output_mae: 0.0734 - val_reg_output_mae: 4.3187

Epoch 00041: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_72.33.h5
Epoch 42/300
35/35 [==============================] - 10s 287ms/step - loss: 26.7112 - cat_output_loss: 0.6046 - reg_output_loss: 26.1067 - cat_output_mae: 0.1110 - reg_output_mae: 2.7454 - val_loss: 71.7405 - val_cat_output_loss: 0.3881 - val_reg_output_loss: 71.3524 - val_cat_output_mae: 0.0727 - val_reg_output_mae: 4.2924

Epoch 00042: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_71.74.h5
Epoch 43/300
35/35 [==============================] - 10s 287ms/step - loss: 25.6461 - cat_output_loss: 0.6024 - reg_output_loss: 25.0437 - cat_output_mae: 0.1103 - reg_output_mae: 2.6904 - val_loss: 71.3331 - val_cat_output_loss: 0.3882 - val_reg_output_loss: 70.9449 - val_cat_output_mae: 0.0730 - val_reg_output_mae: 4.2699

Epoch 00043: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_71.33.h5
Epoch 44/300
35/35 [==============================] - 10s 286ms/step - loss: 24.5937 - cat_output_loss: 0.6001 - reg_output_loss: 23.9935 - cat_output_mae: 0.1096 - reg_output_mae: 2.6370 - val_loss: 71.1088 - val_cat_output_loss: 0.3872 - val_reg_output_loss: 70.7216 - val_cat_output_mae: 0.0727 - val_reg_output_mae: 4.2495

Epoch 00044: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_71.11.h5
Epoch 45/300
35/35 [==============================] - 10s 288ms/step - loss: 23.5500 - cat_output_loss: 0.5982 - reg_output_loss: 22.9518 - cat_output_mae: 0.1092 - reg_output_mae: 2.5840 - val_loss: 70.8604 - val_cat_output_loss: 0.3870 - val_reg_output_loss: 70.4733 - val_cat_output_mae: 0.0724 - val_reg_output_mae: 4.2307

Epoch 00045: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.86.h5
Epoch 46/300
35/35 [==============================] - 10s 288ms/step - loss: 22.7250 - cat_output_loss: 0.5965 - reg_output_loss: 22.1286 - cat_output_mae: 0.1085 - reg_output_mae: 2.5393 - val_loss: 70.7435 - val_cat_output_loss: 0.3868 - val_reg_output_loss: 70.3567 - val_cat_output_mae: 0.0725 - val_reg_output_mae: 4.2140

Epoch 00046: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.74.h5
Epoch 47/300
35/35 [==============================] - 10s 287ms/step - loss: 21.8431 - cat_output_loss: 0.5945 - reg_output_loss: 21.2486 - cat_output_mae: 0.1084 - reg_output_mae: 2.4913 - val_loss: 70.6012 - val_cat_output_loss: 0.3873 - val_reg_output_loss: 70.2139 - val_cat_output_mae: 0.0729 - val_reg_output_mae: 4.2013

Epoch 00047: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.60.h5
Epoch 48/300
35/35 [==============================] - 10s 286ms/step - loss: 21.0316 - cat_output_loss: 0.5927 - reg_output_loss: 20.4388 - cat_output_mae: 0.1076 - reg_output_mae: 2.4440 - val_loss: 70.5719 - val_cat_output_loss: 0.3861 - val_reg_output_loss: 70.1858 - val_cat_output_mae: 0.0722 - val_reg_output_mae: 4.1845

Epoch 00048: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.57.h5
Epoch 49/300
35/35 [==============================] - 10s 287ms/step - loss: 20.2929 - cat_output_loss: 0.5911 - reg_output_loss: 19.7018 - cat_output_mae: 0.1071 - reg_output_mae: 2.4025 - val_loss: 70.5521 - val_cat_output_loss: 0.3862 - val_reg_output_loss: 70.1659 - val_cat_output_mae: 0.0725 - val_reg_output_mae: 4.1717

Epoch 00049: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.55.h5
Epoch 50/300
35/35 [==============================] - 10s 287ms/step - loss: 19.6303 - cat_output_loss: 0.5891 - reg_output_loss: 19.0411 - cat_output_mae: 0.1066 - reg_output_mae: 2.3625 - val_loss: 70.4849 - val_cat_output_loss: 0.3857 - val_reg_output_loss: 70.0993 - val_cat_output_mae: 0.0724 - val_reg_output_mae: 4.1549

Epoch 00050: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.48.h5
Epoch 51/300
35/35 [==============================] - 10s 285ms/step - loss: 18.9358 - cat_output_loss: 0.5877 - reg_output_loss: 18.3481 - cat_output_mae: 0.1060 - reg_output_mae: 2.3210 - val_loss: 70.4581 - val_cat_output_loss: 0.3857 - val_reg_output_loss: 70.0724 - val_cat_output_mae: 0.0724 - val_reg_output_mae: 4.1428

Epoch 00051: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.46.h5
Epoch 52/300
35/35 [==============================] - 10s 287ms/step - loss: 18.3480 - cat_output_loss: 0.5862 - reg_output_loss: 17.7618 - cat_output_mae: 0.1057 - reg_output_mae: 2.2836 - val_loss: 70.4310 - val_cat_output_loss: 0.3852 - val_reg_output_loss: 70.0458 - val_cat_output_mae: 0.0721 - val_reg_output_mae: 4.1290

Epoch 00052: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.43.h5
Epoch 53/300
35/35 [==============================] - 10s 287ms/step - loss: 17.7863 - cat_output_loss: 0.5849 - reg_output_loss: 17.2014 - cat_output_mae: 0.1051 - reg_output_mae: 2.2530 - val_loss: 70.4622 - val_cat_output_loss: 0.3850 - val_reg_output_loss: 70.0772 - val_cat_output_mae: 0.0721 - val_reg_output_mae: 4.1177

Epoch 00053: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.46.h5
Epoch 54/300
35/35 [==============================] - 10s 286ms/step - loss: 17.3048 - cat_output_loss: 0.5838 - reg_output_loss: 16.7211 - cat_output_mae: 0.1046 - reg_output_mae: 2.2245 - val_loss: 70.6186 - val_cat_output_loss: 0.3849 - val_reg_output_loss: 70.2336 - val_cat_output_mae: 0.0723 - val_reg_output_mae: 4.1066

Epoch 00054: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.62.h5
Epoch 55/300
35/35 [==============================] - 10s 287ms/step - loss: 16.8067 - cat_output_loss: 0.5824 - reg_output_loss: 16.2244 - cat_output_mae: 0.1044 - reg_output_mae: 2.1927 - val_loss: 70.6082 - val_cat_output_loss: 0.3841 - val_reg_output_loss: 70.2241 - val_cat_output_mae: 0.0717 - val_reg_output_mae: 4.0970

Epoch 00055: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.61.h5
Epoch 56/300
35/35 [==============================] - 10s 288ms/step - loss: 16.3087 - cat_output_loss: 0.5816 - reg_output_loss: 15.7271 - cat_output_mae: 0.1040 - reg_output_mae: 2.1619 - val_loss: 70.5480 - val_cat_output_loss: 0.3848 - val_reg_output_loss: 70.1632 - val_cat_output_mae: 0.0722 - val_reg_output_mae: 4.0798

Epoch 00056: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.55.h5
Epoch 57/300
35/35 [==============================] - 10s 287ms/step - loss: 15.8449 - cat_output_loss: 0.5810 - reg_output_loss: 15.2639 - cat_output_mae: 0.1038 - reg_output_mae: 2.1318 - val_loss: 70.5363 - val_cat_output_loss: 0.3846 - val_reg_output_loss: 70.1517 - val_cat_output_mae: 0.0720 - val_reg_output_mae: 4.0679

Epoch 00057: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.54.h5
Epoch 58/300
35/35 [==============================] - 10s 287ms/step - loss: 15.4220 - cat_output_loss: 0.5802 - reg_output_loss: 14.8418 - cat_output_mae: 0.1035 - reg_output_mae: 2.1033 - val_loss: 70.6150 - val_cat_output_loss: 0.3847 - val_reg_output_loss: 70.2303 - val_cat_output_mae: 0.0721 - val_reg_output_mae: 4.0582

Epoch 00058: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.62.h5
Epoch 59/300
35/35 [==============================] - 10s 287ms/step - loss: 15.0673 - cat_output_loss: 0.5793 - reg_output_loss: 14.4880 - cat_output_mae: 0.1032 - reg_output_mae: 2.0770 - val_loss: 70.6728 - val_cat_output_loss: 0.3841 - val_reg_output_loss: 70.2887 - val_cat_output_mae: 0.0717 - val_reg_output_mae: 4.0475

Epoch 00059: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.67.h5
Epoch 60/300
35/35 [==============================] - 10s 288ms/step - loss: 14.7103 - cat_output_loss: 0.5781 - reg_output_loss: 14.1322 - cat_output_mae: 0.1028 - reg_output_mae: 2.0514 - val_loss: 70.6275 - val_cat_output_loss: 0.3839 - val_reg_output_loss: 70.2436 - val_cat_output_mae: 0.0718 - val_reg_output_mae: 4.0363

Epoch 00060: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.63.h5
Epoch 61/300
35/35 [==============================] - 10s 287ms/step - loss: 14.3076 - cat_output_loss: 0.5776 - reg_output_loss: 13.7300 - cat_output_mae: 0.1026 - reg_output_mae: 2.0231 - val_loss: 70.7030 - val_cat_output_loss: 0.3838 - val_reg_output_loss: 70.3192 - val_cat_output_mae: 0.0718 - val_reg_output_mae: 4.0327

Epoch 00061: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.70.h5
Epoch 62/300
35/35 [==============================] - 10s 286ms/step - loss: 13.9936 - cat_output_loss: 0.5764 - reg_output_loss: 13.4172 - cat_output_mae: 0.1024 - reg_output_mae: 1.9977 - val_loss: 70.7321 - val_cat_output_loss: 0.3833 - val_reg_output_loss: 70.3488 - val_cat_output_mae: 0.0715 - val_reg_output_mae: 4.0218

Epoch 00062: ReduceLROnPlateau reducing learning rate to 9.999999310821295e-05.

Epoch 00062: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.73.h5
Epoch 63/300
35/35 [==============================] - 10s 288ms/step - loss: 12.7274 - cat_output_loss: 0.5744 - reg_output_loss: 12.1530 - cat_output_mae: 0.1023 - reg_output_mae: 1.9270 - val_loss: 70.6252 - val_cat_output_loss: 0.3839 - val_reg_output_loss: 70.2413 - val_cat_output_mae: 0.0719 - val_reg_output_mae: 4.0073

Epoch 00063: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.63.h5
Epoch 64/300
35/35 [==============================] - 10s 287ms/step - loss: 12.6844 - cat_output_loss: 0.5741 - reg_output_loss: 12.1103 - cat_output_mae: 0.1025 - reg_output_mae: 1.9212 - val_loss: 70.4924 - val_cat_output_loss: 0.3836 - val_reg_output_loss: 70.1087 - val_cat_output_mae: 0.0719 - val_reg_output_mae: 4.0019

Epoch 00064: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.49.h5
Epoch 65/300
35/35 [==============================] - 10s 286ms/step - loss: 12.6251 - cat_output_loss: 0.5738 - reg_output_loss: 12.0513 - cat_output_mae: 0.1023 - reg_output_mae: 1.9171 - val_loss: 70.4100 - val_cat_output_loss: 0.3832 - val_reg_output_loss: 70.0267 - val_cat_output_mae: 0.0716 - val_reg_output_mae: 3.9994

Epoch 00065: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.41.h5
Epoch 66/300
35/35 [==============================] - 10s 287ms/step - loss: 12.5666 - cat_output_loss: 0.5736 - reg_output_loss: 11.9930 - cat_output_mae: 0.1021 - reg_output_mae: 1.9114 - val_loss: 70.3713 - val_cat_output_loss: 0.3829 - val_reg_output_loss: 69.9884 - val_cat_output_mae: 0.0714 - val_reg_output_mae: 3.9981

Epoch 00066: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.37.h5
Epoch 67/300
35/35 [==============================] - 10s 286ms/step - loss: 12.5310 - cat_output_loss: 0.5735 - reg_output_loss: 11.9576 - cat_output_mae: 0.1020 - reg_output_mae: 1.9092 - val_loss: 70.3720 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.9895 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9958

Epoch 00067: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.37.h5
Epoch 68/300
35/35 [==============================] - 10s 287ms/step - loss: 12.4968 - cat_output_loss: 0.5733 - reg_output_loss: 11.9235 - cat_output_mae: 0.1018 - reg_output_mae: 1.9048 - val_loss: 70.2411 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 69.8588 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9937

Epoch 00068: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.24.h5
Epoch 69/300
35/35 [==============================] - 10s 287ms/step - loss: 12.4308 - cat_output_loss: 0.5733 - reg_output_loss: 11.8575 - cat_output_mae: 0.1017 - reg_output_mae: 1.9001 - val_loss: 70.2050 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.8229 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9939

Epoch 00069: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.21.h5
Epoch 70/300
35/35 [==============================] - 10s 287ms/step - loss: 12.3907 - cat_output_loss: 0.5732 - reg_output_loss: 11.8175 - cat_output_mae: 0.1016 - reg_output_mae: 1.8968 - val_loss: 70.2340 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.8520 - val_cat_output_mae: 0.0708 - val_reg_output_mae: 3.9927

Epoch 00070: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.23.h5
Epoch 71/300
35/35 [==============================] - 10s 287ms/step - loss: 12.3582 - cat_output_loss: 0.5731 - reg_output_loss: 11.7851 - cat_output_mae: 0.1016 - reg_output_mae: 1.8933 - val_loss: 70.1877 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.8056 - val_cat_output_mae: 0.0708 - val_reg_output_mae: 3.9921

Epoch 00071: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.19.h5
Epoch 72/300
35/35 [==============================] - 10s 287ms/step - loss: 12.3130 - cat_output_loss: 0.5730 - reg_output_loss: 11.7400 - cat_output_mae: 0.1015 - reg_output_mae: 1.8895 - val_loss: 70.1302 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.7482 - val_cat_output_mae: 0.0708 - val_reg_output_mae: 3.9907

Epoch 00072: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.13.h5
Epoch 73/300
35/35 [==============================] - 10s 287ms/step - loss: 12.2789 - cat_output_loss: 0.5729 - reg_output_loss: 11.7060 - cat_output_mae: 0.1015 - reg_output_mae: 1.8862 - val_loss: 70.1235 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.7416 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9907

Epoch 00073: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.12.h5
Epoch 74/300
35/35 [==============================] - 10s 287ms/step - loss: 12.2509 - cat_output_loss: 0.5729 - reg_output_loss: 11.6779 - cat_output_mae: 0.1014 - reg_output_mae: 1.8833 - val_loss: 70.0696 - val_cat_output_loss: 0.3818 - val_reg_output_loss: 69.6877 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9860

Epoch 00074: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.07.h5
Epoch 75/300
35/35 [==============================] - 10s 287ms/step - loss: 12.1999 - cat_output_loss: 0.5729 - reg_output_loss: 11.6270 - cat_output_mae: 0.1013 - reg_output_mae: 1.8794 - val_loss: 70.0837 - val_cat_output_loss: 0.3817 - val_reg_output_loss: 69.7020 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9867

Epoch 00075: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.08.h5
Epoch 76/300
35/35 [==============================] - 10s 287ms/step - loss: 12.1690 - cat_output_loss: 0.5728 - reg_output_loss: 11.5963 - cat_output_mae: 0.1013 - reg_output_mae: 1.8762 - val_loss: 69.9897 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.6079 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9857

Epoch 00076: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.99.h5
Epoch 77/300
35/35 [==============================] - 10s 289ms/step - loss: 12.1388 - cat_output_loss: 0.5727 - reg_output_loss: 11.5661 - cat_output_mae: 0.1013 - reg_output_mae: 1.8735 - val_loss: 70.0215 - val_cat_output_loss: 0.3817 - val_reg_output_loss: 69.6399 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9863

Epoch 00077: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.02.h5
Epoch 78/300
35/35 [==============================] - 10s 286ms/step - loss: 12.0955 - cat_output_loss: 0.5727 - reg_output_loss: 11.5228 - cat_output_mae: 0.1012 - reg_output_mae: 1.8709 - val_loss: 70.0194 - val_cat_output_loss: 0.3817 - val_reg_output_loss: 69.6377 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9855

Epoch 00078: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.02.h5
Epoch 79/300
35/35 [==============================] - 10s 286ms/step - loss: 12.0621 - cat_output_loss: 0.5728 - reg_output_loss: 11.4893 - cat_output_mae: 0.1012 - reg_output_mae: 1.8672 - val_loss: 70.0091 - val_cat_output_loss: 0.3817 - val_reg_output_loss: 69.6274 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9860

Epoch 00079: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.01.h5
Epoch 80/300
35/35 [==============================] - 10s 288ms/step - loss: 12.0329 - cat_output_loss: 0.5726 - reg_output_loss: 11.4603 - cat_output_mae: 0.1012 - reg_output_mae: 1.8654 - val_loss: 70.0350 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.6531 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9843

Epoch 00080: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_70.03.h5
Epoch 81/300
35/35 [==============================] - 10s 286ms/step - loss: 11.9846 - cat_output_loss: 0.5725 - reg_output_loss: 11.4121 - cat_output_mae: 0.1012 - reg_output_mae: 1.8609 - val_loss: 69.9299 - val_cat_output_loss: 0.3818 - val_reg_output_loss: 69.5481 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9820

Epoch 00081: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.93.h5
Epoch 82/300
35/35 [==============================] - 10s 287ms/step - loss: 11.9410 - cat_output_loss: 0.5725 - reg_output_loss: 11.3685 - cat_output_mae: 0.1012 - reg_output_mae: 1.8571 - val_loss: 69.9123 - val_cat_output_loss: 0.3818 - val_reg_output_loss: 69.5304 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.9810

Epoch 00082: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.91.h5
Epoch 83/300
35/35 [==============================] - 10s 288ms/step - loss: 11.9130 - cat_output_loss: 0.5723 - reg_output_loss: 11.3407 - cat_output_mae: 0.1012 - reg_output_mae: 1.8552 - val_loss: 69.8308 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.4489 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9789

Epoch 00083: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.83.h5
Epoch 84/300
35/35 [==============================] - 10s 288ms/step - loss: 11.8740 - cat_output_loss: 0.5723 - reg_output_loss: 11.3017 - cat_output_mae: 0.1012 - reg_output_mae: 1.8522 - val_loss: 69.7952 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.4132 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9775

Epoch 00084: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.80.h5
Epoch 85/300
35/35 [==============================] - 10s 287ms/step - loss: 11.8476 - cat_output_loss: 0.5722 - reg_output_loss: 11.2755 - cat_output_mae: 0.1012 - reg_output_mae: 1.8496 - val_loss: 69.8787 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.4968 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9780

Epoch 00085: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.88.h5
Epoch 86/300
35/35 [==============================] - 10s 288ms/step - loss: 11.8077 - cat_output_loss: 0.5721 - reg_output_loss: 11.2356 - cat_output_mae: 0.1011 - reg_output_mae: 1.8467 - val_loss: 69.8585 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.4765 - val_cat_output_mae: 0.0708 - val_reg_output_mae: 3.9747

Epoch 00086: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.86.h5
Epoch 87/300
35/35 [==============================] - 10s 287ms/step - loss: 11.7561 - cat_output_loss: 0.5721 - reg_output_loss: 11.1841 - cat_output_mae: 0.1011 - reg_output_mae: 1.8414 - val_loss: 69.7568 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.3749 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.9752

Epoch 00087: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.76.h5
Epoch 88/300
35/35 [==============================] - 10s 288ms/step - loss: 11.7155 - cat_output_loss: 0.5720 - reg_output_loss: 11.1435 - cat_output_mae: 0.1011 - reg_output_mae: 1.8384 - val_loss: 69.7394 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.3573 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9725

Epoch 00088: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.74.h5
Epoch 89/300
35/35 [==============================] - 10s 287ms/step - loss: 11.6857 - cat_output_loss: 0.5719 - reg_output_loss: 11.1138 - cat_output_mae: 0.1011 - reg_output_mae: 1.8360 - val_loss: 69.7439 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.3619 - val_cat_output_mae: 0.0708 - val_reg_output_mae: 3.9739

Epoch 00089: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.74.h5
Epoch 90/300
35/35 [==============================] - 10s 287ms/step - loss: 11.6733 - cat_output_loss: 0.5718 - reg_output_loss: 11.1015 - cat_output_mae: 0.1011 - reg_output_mae: 1.8344 - val_loss: 69.6997 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.3175 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9701

Epoch 00090: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.70.h5
Epoch 91/300
35/35 [==============================] - 10s 286ms/step - loss: 11.6127 - cat_output_loss: 0.5718 - reg_output_loss: 11.0409 - cat_output_mae: 0.1011 - reg_output_mae: 1.8303 - val_loss: 69.6992 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 69.3170 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9669

Epoch 00091: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.70.h5
Epoch 92/300
35/35 [==============================] - 10s 287ms/step - loss: 11.5932 - cat_output_loss: 0.5717 - reg_output_loss: 11.0215 - cat_output_mae: 0.1011 - reg_output_mae: 1.8267 - val_loss: 69.6589 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.2767 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9683

Epoch 00092: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.66.h5
Epoch 93/300
35/35 [==============================] - 10s 287ms/step - loss: 11.5588 - cat_output_loss: 0.5716 - reg_output_loss: 10.9871 - cat_output_mae: 0.1011 - reg_output_mae: 1.8253 - val_loss: 69.6754 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.2932 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9668

Epoch 00093: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.68.h5
Epoch 94/300
35/35 [==============================] - 10s 287ms/step - loss: 11.5108 - cat_output_loss: 0.5716 - reg_output_loss: 10.9393 - cat_output_mae: 0.1011 - reg_output_mae: 1.8215 - val_loss: 69.5907 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.2085 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9621

Epoch 00094: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.59.h5
Epoch 95/300
35/35 [==============================] - 10s 288ms/step - loss: 11.4963 - cat_output_loss: 0.5714 - reg_output_loss: 10.9248 - cat_output_mae: 0.1011 - reg_output_mae: 1.8200 - val_loss: 69.5147 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 69.1324 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9591

Epoch 00095: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.51.h5
Epoch 96/300
35/35 [==============================] - 10s 288ms/step - loss: 11.4532 - cat_output_loss: 0.5715 - reg_output_loss: 10.8817 - cat_output_mae: 0.1010 - reg_output_mae: 1.8160 - val_loss: 69.5841 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.2019 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.9627

Epoch 00096: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.58.h5
Epoch 97/300
35/35 [==============================] - 10s 287ms/step - loss: 11.3892 - cat_output_loss: 0.5714 - reg_output_loss: 10.8177 - cat_output_mae: 0.1010 - reg_output_mae: 1.8113 - val_loss: 69.6099 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 69.2276 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9606

Epoch 00097: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.61.h5
Epoch 98/300
35/35 [==============================] - 10s 286ms/step - loss: 11.3666 - cat_output_loss: 0.5713 - reg_output_loss: 10.7953 - cat_output_mae: 0.1010 - reg_output_mae: 1.8096 - val_loss: 69.5698 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.1874 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9582

Epoch 00098: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.57.h5
Epoch 99/300
35/35 [==============================] - 10s 287ms/step - loss: 11.3028 - cat_output_loss: 0.5713 - reg_output_loss: 10.7315 - cat_output_mae: 0.1011 - reg_output_mae: 1.8044 - val_loss: 69.5428 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 69.1605 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9563

Epoch 00099: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.54.h5
Epoch 100/300
35/35 [==============================] - 10s 286ms/step - loss: 11.2709 - cat_output_loss: 0.5712 - reg_output_loss: 10.6997 - cat_output_mae: 0.1010 - reg_output_mae: 1.8018 - val_loss: 69.5584 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.1759 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9551

Epoch 00100: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.56.h5
Epoch 101/300
35/35 [==============================] - 10s 287ms/step - loss: 11.2397 - cat_output_loss: 0.5711 - reg_output_loss: 10.6685 - cat_output_mae: 0.1010 - reg_output_mae: 1.7989 - val_loss: 69.4582 - val_cat_output_loss: 0.3826 - val_reg_output_loss: 69.0756 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9524

Epoch 00101: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.46.h5
Epoch 102/300
35/35 [==============================] - 10s 287ms/step - loss: 11.2097 - cat_output_loss: 0.5710 - reg_output_loss: 10.6387 - cat_output_mae: 0.1010 - reg_output_mae: 1.7961 - val_loss: 69.4740 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0915 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9508

Epoch 00102: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.47.h5
Epoch 103/300
35/35 [==============================] - 10s 286ms/step - loss: 11.1522 - cat_output_loss: 0.5709 - reg_output_loss: 10.5813 - cat_output_mae: 0.1009 - reg_output_mae: 1.7910 - val_loss: 69.5182 - val_cat_output_loss: 0.3826 - val_reg_output_loss: 69.1357 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9526

Epoch 00103: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.52.h5
Epoch 104/300
35/35 [==============================] - 10s 287ms/step - loss: 11.0933 - cat_output_loss: 0.5709 - reg_output_loss: 10.5223 - cat_output_mae: 0.1009 - reg_output_mae: 1.7876 - val_loss: 69.4061 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0237 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9466

Epoch 00104: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.41.h5
Epoch 105/300
35/35 [==============================] - 10s 287ms/step - loss: 11.0682 - cat_output_loss: 0.5707 - reg_output_loss: 10.4974 - cat_output_mae: 0.1009 - reg_output_mae: 1.7843 - val_loss: 69.3828 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0002 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9455

Epoch 00105: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.38.h5
Epoch 106/300
35/35 [==============================] - 10s 287ms/step - loss: 11.0109 - cat_output_loss: 0.5707 - reg_output_loss: 10.4402 - cat_output_mae: 0.1009 - reg_output_mae: 1.7801 - val_loss: 69.4069 - val_cat_output_loss: 0.3827 - val_reg_output_loss: 69.0243 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9437

Epoch 00106: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.41.h5
Epoch 107/300
35/35 [==============================] - 10s 288ms/step - loss: 10.9933 - cat_output_loss: 0.5705 - reg_output_loss: 10.4229 - cat_output_mae: 0.1009 - reg_output_mae: 1.7780 - val_loss: 69.3728 - val_cat_output_loss: 0.3826 - val_reg_output_loss: 68.9902 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9404

Epoch 00107: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.37.h5
Epoch 108/300
35/35 [==============================] - 10s 285ms/step - loss: 10.9378 - cat_output_loss: 0.5704 - reg_output_loss: 10.3674 - cat_output_mae: 0.1008 - reg_output_mae: 1.7740 - val_loss: 69.4076 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0250 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9408

Epoch 00108: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.41.h5
Epoch 109/300
35/35 [==============================] - 10s 288ms/step - loss: 10.8894 - cat_output_loss: 0.5702 - reg_output_loss: 10.3192 - cat_output_mae: 0.1008 - reg_output_mae: 1.7690 - val_loss: 69.3548 - val_cat_output_loss: 0.3826 - val_reg_output_loss: 68.9722 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9371

Epoch 00109: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.35.h5
Epoch 110/300
35/35 [==============================] - 10s 288ms/step - loss: 10.8371 - cat_output_loss: 0.5702 - reg_output_loss: 10.2669 - cat_output_mae: 0.1007 - reg_output_mae: 1.7658 - val_loss: 69.3837 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0012 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9361

Epoch 00110: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.38.h5
Epoch 111/300
35/35 [==============================] - 10s 287ms/step - loss: 10.7890 - cat_output_loss: 0.5700 - reg_output_loss: 10.2190 - cat_output_mae: 0.1007 - reg_output_mae: 1.7607 - val_loss: 69.4038 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 69.0214 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9310

Epoch 00111: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.40.h5
Epoch 112/300
35/35 [==============================] - 10s 287ms/step - loss: 10.7464 - cat_output_loss: 0.5700 - reg_output_loss: 10.1764 - cat_output_mae: 0.1006 - reg_output_mae: 1.7583 - val_loss: 69.3871 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.0049 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9335

Epoch 00112: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.39.h5
Epoch 113/300
35/35 [==============================] - 10s 287ms/step - loss: 10.6868 - cat_output_loss: 0.5697 - reg_output_loss: 10.1170 - cat_output_mae: 0.1006 - reg_output_mae: 1.7531 - val_loss: 69.3838 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 69.0013 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9308

Epoch 00113: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.38.h5
Epoch 114/300
35/35 [==============================] - 10s 287ms/step - loss: 10.6365 - cat_output_loss: 0.5697 - reg_output_loss: 10.0667 - cat_output_mae: 0.1006 - reg_output_mae: 1.7487 - val_loss: 69.4624 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.0804 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9318

Epoch 00114: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.46.h5
Epoch 115/300
35/35 [==============================] - 10s 287ms/step - loss: 10.5849 - cat_output_loss: 0.5695 - reg_output_loss: 10.0154 - cat_output_mae: 0.1005 - reg_output_mae: 1.7442 - val_loss: 69.3254 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 68.9432 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9280

Epoch 00115: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.33.h5
Epoch 116/300
35/35 [==============================] - 10s 287ms/step - loss: 10.5456 - cat_output_loss: 0.5694 - reg_output_loss: 9.9762 - cat_output_mae: 0.1005 - reg_output_mae: 1.7426 - val_loss: 69.4325 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 69.0502 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9315

Epoch 00116: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.43.h5
Epoch 117/300
35/35 [==============================] - 10s 287ms/step - loss: 10.4906 - cat_output_loss: 0.5693 - reg_output_loss: 9.9212 - cat_output_mae: 0.1005 - reg_output_mae: 1.7376 - val_loss: 69.3397 - val_cat_output_loss: 0.3823 - val_reg_output_loss: 68.9574 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9262

Epoch 00117: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.34.h5
Epoch 118/300
35/35 [==============================] - 10s 287ms/step - loss: 10.4391 - cat_output_loss: 0.5692 - reg_output_loss: 9.8699 - cat_output_mae: 0.1005 - reg_output_mae: 1.7323 - val_loss: 69.4208 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.0385 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9276

Epoch 00118: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.42.h5
Epoch 119/300
35/35 [==============================] - 10s 287ms/step - loss: 10.3881 - cat_output_loss: 0.5691 - reg_output_loss: 9.8190 - cat_output_mae: 0.1004 - reg_output_mae: 1.7296 - val_loss: 69.3950 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 69.0126 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9287

Epoch 00119: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.40.h5
Epoch 120/300
35/35 [==============================] - 10s 287ms/step - loss: 10.3246 - cat_output_loss: 0.5689 - reg_output_loss: 9.7557 - cat_output_mae: 0.1004 - reg_output_mae: 1.7244 - val_loss: 69.3754 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 68.9929 - val_cat_output_mae: 0.0713 - val_reg_output_mae: 3.9258

Epoch 00120: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.38.h5
Epoch 121/300
35/35 [==============================] - 10s 287ms/step - loss: 10.2583 - cat_output_loss: 0.5688 - reg_output_loss: 9.6894 - cat_output_mae: 0.1004 - reg_output_mae: 1.7193 - val_loss: 69.4129 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.0309 - val_cat_output_mae: 0.0710 - val_reg_output_mae: 3.9270

Epoch 00121: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.41.h5
Epoch 122/300
35/35 [==============================] - 10s 287ms/step - loss: 10.2145 - cat_output_loss: 0.5687 - reg_output_loss: 9.6458 - cat_output_mae: 0.1003 - reg_output_mae: 1.7145 - val_loss: 69.4638 - val_cat_output_loss: 0.3825 - val_reg_output_loss: 69.0813 - val_cat_output_mae: 0.0714 - val_reg_output_mae: 3.9260

Epoch 00122: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.46.h5
Epoch 123/300
35/35 [==============================] - 10s 286ms/step - loss: 10.1691 - cat_output_loss: 0.5685 - reg_output_loss: 9.6006 - cat_output_mae: 0.1004 - reg_output_mae: 1.7120 - val_loss: 69.3110 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 68.9290 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9225

Epoch 00123: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.31.h5
Epoch 124/300
35/35 [==============================] - 10s 287ms/step - loss: 10.1023 - cat_output_loss: 0.5684 - reg_output_loss: 9.5339 - cat_output_mae: 0.1002 - reg_output_mae: 1.7057 - val_loss: 69.4372 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.0551 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9262

Epoch 00124: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.44.h5
Epoch 125/300
35/35 [==============================] - 10s 288ms/step - loss: 10.0365 - cat_output_loss: 0.5683 - reg_output_loss: 9.4682 - cat_output_mae: 0.1002 - reg_output_mae: 1.7020 - val_loss: 69.4354 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.0534 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9242

Epoch 00125: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.44.h5
Epoch 126/300
35/35 [==============================] - 10s 288ms/step - loss: 9.9994 - cat_output_loss: 0.5681 - reg_output_loss: 9.4313 - cat_output_mae: 0.1001 - reg_output_mae: 1.6974 - val_loss: 69.4569 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.0748 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9245

Epoch 00126: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.46.h5
Epoch 127/300
35/35 [==============================] - 10s 288ms/step - loss: 9.9481 - cat_output_loss: 0.5680 - reg_output_loss: 9.3802 - cat_output_mae: 0.1002 - reg_output_mae: 1.6919 - val_loss: 69.4212 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.0393 - val_cat_output_mae: 0.0711 - val_reg_output_mae: 3.9220

Epoch 00127: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.42.h5
Epoch 128/300
35/35 [==============================] - 10s 287ms/step - loss: 9.8832 - cat_output_loss: 0.5680 - reg_output_loss: 9.3153 - cat_output_mae: 0.1001 - reg_output_mae: 1.6868 - val_loss: 69.4273 - val_cat_output_loss: 0.3822 - val_reg_output_loss: 69.0451 - val_cat_output_mae: 0.0713 - val_reg_output_mae: 3.9215

Epoch 00128: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.43.h5
Epoch 129/300
35/35 [==============================] - 10s 287ms/step - loss: 9.8287 - cat_output_loss: 0.5677 - reg_output_loss: 9.2610 - cat_output_mae: 0.1002 - reg_output_mae: 1.6821 - val_loss: 69.4234 - val_cat_output_loss: 0.3820 - val_reg_output_loss: 69.0414 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9222

Epoch 00129: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.42.h5
Epoch 130/300
35/35 [==============================] - 10s 286ms/step - loss: 9.7872 - cat_output_loss: 0.5676 - reg_output_loss: 9.2196 - cat_output_mae: 0.1001 - reg_output_mae: 1.6778 - val_loss: 69.5360 - val_cat_output_loss: 0.3821 - val_reg_output_loss: 69.1539 - val_cat_output_mae: 0.0713 - val_reg_output_mae: 3.9242

Epoch 00130: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.54.h5
Epoch 131/300
35/35 [==============================] - 10s 287ms/step - loss: 9.7003 - cat_output_loss: 0.5676 - reg_output_loss: 9.1327 - cat_output_mae: 0.1000 - reg_output_mae: 1.6701 - val_loss: 69.5886 - val_cat_output_loss: 0.3818 - val_reg_output_loss: 69.2068 - val_cat_output_mae: 0.0712 - val_reg_output_mae: 3.9266

Epoch 00131: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.59.h5
Epoch 132/300
35/35 [==============================] - 10s 288ms/step - loss: 9.6375 - cat_output_loss: 0.5674 - reg_output_loss: 9.0701 - cat_output_mae: 0.1001 - reg_output_mae: 1.6660 - val_loss: 69.5146 - val_cat_output_loss: 0.3824 - val_reg_output_loss: 69.1322 - val_cat_output_mae: 0.0716 - val_reg_output_mae: 3.9232

Epoch 00132: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.51.h5
Epoch 133/300
35/35 [==============================] - 10s 288ms/step - loss: 9.5851 - cat_output_loss: 0.5672 - reg_output_loss: 9.0179 - cat_output_mae: 0.1002 - reg_output_mae: 1.6604 - val_loss: 69.4511 - val_cat_output_loss: 0.3819 - val_reg_output_loss: 69.0692 - val_cat_output_mae: 0.0713 - val_reg_output_mae: 3.9191

Epoch 00133: ReduceLROnPlateau reducing learning rate to 9.999999019782991e-06.

Epoch 00133: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_69.45.h5
Epoch 134/300
35/35 [==============================] - 10s 286ms/step - loss: 9.3222 - cat_output_loss: 0.5667 - reg_output_loss: 8.7555 - cat_output_mae: 0.1001 - reg_output_mae: 1.6391 - val_loss: 68.7531 - val_cat_output_loss: 0.3811 - val_reg_output_loss: 68.3720 - val_cat_output_mae: 0.0709 - val_reg_output_mae: 3.8965

Epoch 00134: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.75.h5
Epoch 135/300
35/35 [==============================] - 10s 287ms/step - loss: 9.1026 - cat_output_loss: 0.5666 - reg_output_loss: 8.5361 - cat_output_mae: 0.0999 - reg_output_mae: 1.6190 - val_loss: 68.6452 - val_cat_output_loss: 0.3809 - val_reg_output_loss: 68.2643 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.8902

Epoch 00135: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 136/300
35/35 [==============================] - 10s 287ms/step - loss: 9.1264 - cat_output_loss: 0.5665 - reg_output_loss: 8.5599 - cat_output_mae: 0.0999 - reg_output_mae: 1.6214 - val_loss: 68.6381 - val_cat_output_loss: 0.3808 - val_reg_output_loss: 68.2573 - val_cat_output_mae: 0.0707 - val_reg_output_mae: 3.8895

Epoch 00136: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.64.h5
Epoch 137/300
35/35 [==============================] - 10s 287ms/step - loss: 9.1191 - cat_output_loss: 0.5665 - reg_output_loss: 8.5527 - cat_output_mae: 0.0998 - reg_output_mae: 1.6204 - val_loss: 68.6269 - val_cat_output_loss: 0.3807 - val_reg_output_loss: 68.2462 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.8900

Epoch 00137: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.63.h5
Epoch 138/300
35/35 [==============================] - 10s 288ms/step - loss: 9.1090 - cat_output_loss: 0.5664 - reg_output_loss: 8.5426 - cat_output_mae: 0.0998 - reg_output_mae: 1.6195 - val_loss: 68.6222 - val_cat_output_loss: 0.3806 - val_reg_output_loss: 68.2416 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.8907

Epoch 00138: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.62.h5
Epoch 139/300
35/35 [==============================] - 10s 288ms/step - loss: 9.0997 - cat_output_loss: 0.5664 - reg_output_loss: 8.5333 - cat_output_mae: 0.0998 - reg_output_mae: 1.6181 - val_loss: 68.5874 - val_cat_output_loss: 0.3806 - val_reg_output_loss: 68.2068 - val_cat_output_mae: 0.0706 - val_reg_output_mae: 3.8868

Epoch 00139: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.59.h5
Epoch 140/300
35/35 [==============================] - 10s 288ms/step - loss: 9.0784 - cat_output_loss: 0.5664 - reg_output_loss: 8.5120 - cat_output_mae: 0.0998 - reg_output_mae: 1.6163 - val_loss: 68.6132 - val_cat_output_loss: 0.3805 - val_reg_output_loss: 68.2327 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8862

Epoch 00140: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.61.h5
Epoch 141/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0757 - cat_output_loss: 0.5664 - reg_output_loss: 8.5093 - cat_output_mae: 0.0998 - reg_output_mae: 1.6163 - val_loss: 68.5984 - val_cat_output_loss: 0.3804 - val_reg_output_loss: 68.2179 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8843

Epoch 00141: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.60.h5
Epoch 142/300
35/35 [==============================] - 10s 288ms/step - loss: 9.0606 - cat_output_loss: 0.5664 - reg_output_loss: 8.4942 - cat_output_mae: 0.0998 - reg_output_mae: 1.6145 - val_loss: 68.6158 - val_cat_output_loss: 0.3804 - val_reg_output_loss: 68.2354 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8874

Epoch 00142: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.62.h5
Epoch 143/300
35/35 [==============================] - 10s 287ms/step - loss: 9.0572 - cat_output_loss: 0.5663 - reg_output_loss: 8.4909 - cat_output_mae: 0.0997 - reg_output_mae: 1.6138 - val_loss: 68.6376 - val_cat_output_loss: 0.3804 - val_reg_output_loss: 68.2572 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8870

Epoch 00143: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.64.h5
Epoch 144/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0495 - cat_output_loss: 0.5663 - reg_output_loss: 8.4832 - cat_output_mae: 0.0997 - reg_output_mae: 1.6130 - val_loss: 68.6034 - val_cat_output_loss: 0.3804 - val_reg_output_loss: 68.2230 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8843

Epoch 00144: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.60.h5
Epoch 145/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0430 - cat_output_loss: 0.5663 - reg_output_loss: 8.4767 - cat_output_mae: 0.0997 - reg_output_mae: 1.6120 - val_loss: 68.5439 - val_cat_output_loss: 0.3804 - val_reg_output_loss: 68.1636 - val_cat_output_mae: 0.0705 - val_reg_output_mae: 3.8827

Epoch 00145: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.54.h5
Epoch 146/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0444 - cat_output_loss: 0.5662 - reg_output_loss: 8.4782 - cat_output_mae: 0.0997 - reg_output_mae: 1.6116 - val_loss: 68.5788 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.1985 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8832

Epoch 00146: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.58.h5
Epoch 147/300
35/35 [==============================] - 10s 287ms/step - loss: 9.0369 - cat_output_loss: 0.5662 - reg_output_loss: 8.4707 - cat_output_mae: 0.0997 - reg_output_mae: 1.6109 - val_loss: 68.5961 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2158 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8842

Epoch 00147: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.60.h5
Epoch 148/300
35/35 [==============================] - 10s 288ms/step - loss: 9.0185 - cat_output_loss: 0.5662 - reg_output_loss: 8.4523 - cat_output_mae: 0.0997 - reg_output_mae: 1.6092 - val_loss: 68.6357 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2554 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8836

Epoch 00148: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.64.h5
Epoch 149/300
35/35 [==============================] - 10s 287ms/step - loss: 9.0169 - cat_output_loss: 0.5662 - reg_output_loss: 8.4507 - cat_output_mae: 0.0997 - reg_output_mae: 1.6086 - val_loss: 68.6179 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2376 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8847

Epoch 00149: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.62.h5
Epoch 150/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0098 - cat_output_loss: 0.5662 - reg_output_loss: 8.4436 - cat_output_mae: 0.0997 - reg_output_mae: 1.6084 - val_loss: 68.6199 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2397 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8859

Epoch 00150: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.62.h5
Epoch 151/300
35/35 [==============================] - 10s 286ms/step - loss: 9.0049 - cat_output_loss: 0.5662 - reg_output_loss: 8.4387 - cat_output_mae: 0.0997 - reg_output_mae: 1.6080 - val_loss: 68.6474 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2672 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8835

Epoch 00151: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 152/300
35/35 [==============================] - 10s 289ms/step - loss: 8.9860 - cat_output_loss: 0.5662 - reg_output_loss: 8.4199 - cat_output_mae: 0.0997 - reg_output_mae: 1.6061 - val_loss: 68.6024 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2222 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8810

Epoch 00152: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.60.h5
Epoch 153/300
35/35 [==============================] - 10s 287ms/step - loss: 8.9881 - cat_output_loss: 0.5662 - reg_output_loss: 8.4219 - cat_output_mae: 0.0997 - reg_output_mae: 1.6055 - val_loss: 68.6628 - val_cat_output_loss: 0.3801 - val_reg_output_loss: 68.2826 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8823

Epoch 00153: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.66.h5
Epoch 154/300
35/35 [==============================] - 10s 287ms/step - loss: 8.9801 - cat_output_loss: 0.5662 - reg_output_loss: 8.4139 - cat_output_mae: 0.0997 - reg_output_mae: 1.6049 - val_loss: 68.6113 - val_cat_output_loss: 0.3801 - val_reg_output_loss: 68.2312 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8820

Epoch 00154: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.61.h5
Epoch 155/300
35/35 [==============================] - 10s 285ms/step - loss: 8.9747 - cat_output_loss: 0.5662 - reg_output_loss: 8.4085 - cat_output_mae: 0.0997 - reg_output_mae: 1.6038 - val_loss: 68.6738 - val_cat_output_loss: 0.3801 - val_reg_output_loss: 68.2937 - val_cat_output_mae: 0.0703 - val_reg_output_mae: 3.8820

Epoch 00155: ReduceLROnPlateau reducing learning rate to 9.99999883788405e-07.

Epoch 00155: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.67.h5
Epoch 156/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8632 - cat_output_loss: 0.5661 - reg_output_loss: 8.2970 - cat_output_mae: 0.0996 - reg_output_mae: 1.5927 - val_loss: 68.5783 - val_cat_output_loss: 0.3801 - val_reg_output_loss: 68.1982 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8803

Epoch 00156: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.58.h5
Epoch 157/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8539 - cat_output_loss: 0.5661 - reg_output_loss: 8.2878 - cat_output_mae: 0.0997 - reg_output_mae: 1.5923 - val_loss: 68.6254 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2452 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8821

Epoch 00157: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.63.h5
Epoch 158/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8472 - cat_output_loss: 0.5661 - reg_output_loss: 8.2810 - cat_output_mae: 0.0997 - reg_output_mae: 1.5922 - val_loss: 68.6180 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2378 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8814

Epoch 00158: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.62.h5
Epoch 159/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8599 - cat_output_loss: 0.5661 - reg_output_loss: 8.2938 - cat_output_mae: 0.0997 - reg_output_mae: 1.5928 - val_loss: 68.6443 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2640 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8814

Epoch 00159: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.64.h5
Epoch 160/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8647 - cat_output_loss: 0.5661 - reg_output_loss: 8.2985 - cat_output_mae: 0.0997 - reg_output_mae: 1.5932 - val_loss: 68.6693 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2890 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8817

Epoch 00160: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.67.h5
Epoch 161/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8604 - cat_output_loss: 0.5661 - reg_output_loss: 8.2943 - cat_output_mae: 0.0997 - reg_output_mae: 1.5932 - val_loss: 68.6856 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3053 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8828

Epoch 00161: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.69.h5
Epoch 162/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8576 - cat_output_loss: 0.5661 - reg_output_loss: 8.2914 - cat_output_mae: 0.0997 - reg_output_mae: 1.5932 - val_loss: 68.7134 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3332 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8837

Epoch 00162: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.71.h5
Epoch 163/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8563 - cat_output_loss: 0.5661 - reg_output_loss: 8.2902 - cat_output_mae: 0.0997 - reg_output_mae: 1.5930 - val_loss: 68.6799 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.2996 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8816

Epoch 00163: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 164/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8503 - cat_output_loss: 0.5661 - reg_output_loss: 8.2841 - cat_output_mae: 0.0997 - reg_output_mae: 1.5919 - val_loss: 68.6921 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.3119 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8819

Epoch 00164: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.69.h5
Epoch 165/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8485 - cat_output_loss: 0.5661 - reg_output_loss: 8.2824 - cat_output_mae: 0.0997 - reg_output_mae: 1.5919 - val_loss: 68.7017 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3214 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8822

Epoch 00165: ReduceLROnPlateau reducing learning rate to 9.99999883788405e-08.

Epoch 00165: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.70.h5
Epoch 166/300
35/35 [==============================] - 10s 289ms/step - loss: 8.8425 - cat_output_loss: 0.5661 - reg_output_loss: 8.2764 - cat_output_mae: 0.0997 - reg_output_mae: 1.5914 - val_loss: 68.6810 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3008 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8821

Epoch 00166: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 167/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8431 - cat_output_loss: 0.5661 - reg_output_loss: 8.2769 - cat_output_mae: 0.0997 - reg_output_mae: 1.5911 - val_loss: 68.6804 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3002 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8818

Epoch 00167: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 168/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8397 - cat_output_loss: 0.5661 - reg_output_loss: 8.2736 - cat_output_mae: 0.0997 - reg_output_mae: 1.5909 - val_loss: 68.6807 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3005 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8817

Epoch 00168: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 169/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8376 - cat_output_loss: 0.5661 - reg_output_loss: 8.2715 - cat_output_mae: 0.0997 - reg_output_mae: 1.5911 - val_loss: 68.6940 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3138 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8819

Epoch 00169: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.69.h5
Epoch 170/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8370 - cat_output_loss: 0.5661 - reg_output_loss: 8.2709 - cat_output_mae: 0.0997 - reg_output_mae: 1.5912 - val_loss: 68.6939 - val_cat_output_loss: 0.3802 - val_reg_output_loss: 68.3136 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8819

Epoch 00170: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.69.h5
Epoch 171/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8420 - cat_output_loss: 0.5661 - reg_output_loss: 8.2759 - cat_output_mae: 0.0997 - reg_output_mae: 1.5913 - val_loss: 68.6940 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.3138 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8819

Epoch 00171: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.69.h5
Epoch 172/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8417 - cat_output_loss: 0.5661 - reg_output_loss: 8.2755 - cat_output_mae: 0.0997 - reg_output_mae: 1.5914 - val_loss: 68.6819 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.3016 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00172: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 173/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2720 - cat_output_mae: 0.0997 - reg_output_mae: 1.5912 - val_loss: 68.6809 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.3007 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00173: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 174/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8365 - cat_output_loss: 0.5661 - reg_output_loss: 8.2704 - cat_output_mae: 0.0997 - reg_output_mae: 1.5911 - val_loss: 68.6778 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2976 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00174: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 175/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8370 - cat_output_loss: 0.5661 - reg_output_loss: 8.2710 - cat_output_mae: 0.0997 - reg_output_mae: 1.5911 - val_loss: 68.6487 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2685 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00175: ReduceLROnPlateau reducing learning rate to 9.999998695775504e-09.

Epoch 00175: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 176/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8375 - cat_output_loss: 0.5661 - reg_output_loss: 8.2714 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6486 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2683 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00176: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 177/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8384 - cat_output_loss: 0.5661 - reg_output_loss: 8.2723 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6486 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2683 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00177: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 178/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8384 - cat_output_loss: 0.5661 - reg_output_loss: 8.2723 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6487 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2684 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00178: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 179/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8384 - cat_output_loss: 0.5661 - reg_output_loss: 8.2723 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6488 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2686 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00179: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 180/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8381 - cat_output_loss: 0.5661 - reg_output_loss: 8.2720 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6486 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2683 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00180: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 181/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8380 - cat_output_loss: 0.5661 - reg_output_loss: 8.2719 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6488 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2685 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00181: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 182/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8381 - cat_output_loss: 0.5661 - reg_output_loss: 8.2720 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6797 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2995 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8815

Epoch 00182: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 183/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8381 - cat_output_loss: 0.5661 - reg_output_loss: 8.2720 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6489 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2686 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8812

Epoch 00183: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 184/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8379 - cat_output_loss: 0.5661 - reg_output_loss: 8.2718 - cat_output_mae: 0.0997 - reg_output_mae: 1.5909 - val_loss: 68.6496 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2693 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00184: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 185/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8384 - cat_output_loss: 0.5661 - reg_output_loss: 8.2723 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00185: ReduceLROnPlateau reducing learning rate to 9.99999905104687e-10.

Epoch 00185: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 186/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00186: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 187/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00187: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 188/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00188: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 189/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00189: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 190/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00190: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 191/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00191: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 192/300
35/35 [==============================] - 10s 287ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00192: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 193/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8382 - cat_output_loss: 0.5661 - reg_output_loss: 8.2721 - cat_output_mae: 0.0997 - reg_output_mae: 1.5910 - val_loss: 68.6494 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2691 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8813

Epoch 00193: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.65.h5
Epoch 194/300
35/35 [==============================] - 10s 288ms/step - loss: 8.8378 - cat_output_loss: 0.5661 - reg_output_loss: 8.2717 - cat_output_mae: 0.0997 - reg_output_mae: 1.5909 - val_loss: 68.6798 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2995 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8815

Epoch 00194: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
Epoch 195/300
35/35 [==============================] - 10s 286ms/step - loss: 8.8378 - cat_output_loss: 0.5661 - reg_output_loss: 8.2717 - cat_output_mae: 0.0997 - reg_output_mae: 1.5909 - val_loss: 68.6798 - val_cat_output_loss: 0.3803 - val_reg_output_loss: 68.2995 - val_cat_output_mae: 0.0704 - val_reg_output_mae: 3.8815

Epoch 00195: ReduceLROnPlateau reducing learning rate to 9.999998606957661e-11.

Epoch 00195: saving model to /root/jupyter/데이콘/청경채/model2/forecast_weight_68.68.h5
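The repeated "ReduceLROnPlateau reducing learning rate to ..." messages show the learning rate being cut by a factor of 10 each time the monitored loss stops improving; by epoch 195 it is down to roughly 1e-10, which is why the metrics barely move over the last few dozen epochs. A minimal callback configuration that produces this behaviour is sketched below; the factor matches the log, but the monitored metric and patience are illustrative assumptions, not the notebook's exact settings.

# Sketch only: factor=0.1 reproduces the 10x drops seen in the log above;
# monitor='val_loss' and patience=10 are assumptions for illustration.
reduce_lr = tf.keras.callbacks.ReduceLROnPlateau(
    monitor='val_loss', factor=0.1, patience=10, verbose=1)
# The callback would then be passed to model.fit(..., callbacks=[reduce_lr, ...]).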

# Find the checkpoint whose filename encodes the lowest validation loss, reload it,
# and save a copy as the final forecasting model.
file_loss=min([i.split('/')[-1].split('_')[-1].replace('.h5','')
               for i in os.listdir('/root/jupyter/데이콘/청경채/model2/') if re.compile('forecast').findall(i)])
forecast_model=tf.keras.models.load_model(f'/root/jupyter/데이콘/청경채/model2/forecast_weight_{file_loss}.h5')
tf.keras.models.save_model(forecast_model,f'/root/jupyter/데이콘/청경채/output/forecast_weight_best_model_v1.h5')
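
Scanning the checkpoint directory works here because every saved file embeds its validation loss in its name. An alternative, sketched below under the assumption that 'val_loss' is the monitored quantity (the file path is illustrative, not the notebook's actual callback), is to let Keras keep only the single best checkpoint so no filename parsing is needed.

# Sketch of an alternative checkpointing strategy (illustrative assumptions):
# keep exactly one file, overwritten only when the monitored metric improves.
best_ckpt = tf.keras.callbacks.ModelCheckpoint(
    filepath='/root/jupyter/데이콘/청경채/model2/forecast_weight_best.h5',
    monitor='val_loss', save_best_only=True, verbose=1)
# model.fit(..., callbacks=[best_ckpt, ...]) would then leave a single best .h5 to load.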
In [38]:
# The model has two heads (cat_output and reg_output in the training log),
# so predict() returns two arrays; only the regression head goes into the submission.
pred1,pred2=forecast_model.predict(te_data)
submit=pd.read_csv(f"{main_dir}/sample_submission.csv")
submit['leaf_weight']=pred2
submit.to_csv(f"/root/jupyter/데이콘/청경채/submit_6.csv",index=False)
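
Because the regression head is unconstrained, a quick look at the written file before uploading can catch obvious problems such as a wrong row count or negative weights. A minimal check, added purely for illustration:

# Illustrative sanity check on the file written above (not part of the original pipeline).
check = pd.read_csv("/root/jupyter/데이콘/청경채/submit_6.csv")
print(check.shape)
print(check['leaf_weight'].describe())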