XGBoost 
 training 

 GPU usage
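
 A minimal sketch of enabling GPU training for the Sample further below, assuming a CUDA-enabled XGBoost build and an available GPU. With XGBoost 1.x the GPU histogram algorithm is selected with tree_method="gpu_hist"; XGBoost 2.0 and later instead use device="cuda" together with tree_method="hist". The xgb_train_data and evals objects are the ones defined in the Sample section; no timing comparison against the CPU run is implied here.

# Sketch: GPU parameters for the MNIST sample below (assumes CUDA-enabled XGBoost).
xgb_params = {"objective": "multi:softmax",
              "num_class": 10,
              "eval_metric": "mlogloss",
              "tree_method": "gpu_hist"}   # XGBoost 1.x style GPU selection

# XGBoost 2.0+ equivalent:
# xgb_params = {"objective": "multi:softmax", "num_class": 10,
#               "eval_metric": "mlogloss",
#               "tree_method": "hist", "device": "cuda"}

gbm = xgb.train(xgb_params, xgb_train_data,   # xgb_train_data / evals as in the Sample
                num_boost_round=100,
                early_stopping_rounds=10,
                evals=evals)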

 Features / Requirements
 Configuration / Method
 Tasks
 Libraries
 Installation
 Sample

 MNIST_XGBoost

# mnistXGBoost.py
import time

import xgboost as xgb
import pandas as pd
import numpy as np
from mnist import load_mnist
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Load the prepared MNIST dataset
(x_train, t_train), (x_test, t_test) = load_mnist()

# Split the training set into training and validation data
# (not needed if the data is already split)
split_ratio = 0.2
x_train, x_validation, t_train, t_validation = train_test_split(x_train, t_train, test_size=split_ratio)

# Flatten to 784-dimensional vectors (not needed if load_mnist already flattens)
#x_train = x_train.reshape(-1, 784)
#x_validation = x_validation.reshape(-1, 784)
#x_test = x_test.reshape(-1, 784)

# Normalize to [0, 1] (not needed if load_mnist already normalizes)
#x_train = x_train.astype(float) / 255
#x_validation = x_validation.astype(float) / 255
#x_test = x_test.astype(float) / 255

# Wrap the data in DMatrix objects
xgb_train_data = xgb.DMatrix(x_train, label=t_train)
xgb_eval_data = xgb.DMatrix(x_validation, label=t_validation)
xgb_test_data = xgb.DMatrix(x_test, label=t_test)

# Build and train the XGBoost model
start = time.time()
xgb_params = {"objective": "multi:softmax",
              "num_class": 10,
              "eval_metric": "mlogloss"}
evals = [(xgb_train_data, "train"), (xgb_eval_data, "eval")]
gbm = xgb.train(xgb_params, xgb_train_data,
                num_boost_round=100,
                early_stopping_rounds=10,
                evals=evals)

# Evaluate on the test set (multi:softmax returns predicted class labels)
preds = gbm.predict(xgb_test_data)
print("accuracy score: {}".format(accuracy_score(t_test, preds)))
print("elapsed time: {}".format(time.time() - start))
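
 A small follow-up sketch, not part of the original sample: when early_stopping_rounds stops training before num_boost_round, the returned booster records the best round in best_iteration, and prediction can be limited to those rounds. The iteration_range argument assumes a reasonably recent XGBoost (roughly 1.4 or later).

# Sketch (assumes XGBoost ~1.4+): predict using only the rounds up to the
# best validation score recorded by early stopping.
best = gbm.best_iteration
preds = gbm.predict(xgb_test_data, iteration_range=(0, best + 1))
print("accuracy score (best iteration): {}".format(accuracy_score(t_test, preds)))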
 training

(base) \XGBoost\mnist> python mnist_XGBoost.py
\Anaconda3\lib\site-packages\xgboost\compat.py:36: FutureWarning: pandas.Int64Index is deprecated and will be removed from pandas in a future version. Use pandas.Index with the appropriate dtype instead.
  from pandas import MultiIndex, Int64Index
[0]   train-mlogloss:1.35868   eval-mlogloss:1.38380
[1]   train-mlogloss:1.02530   eval-mlogloss:1.06320
[2]   train-mlogloss:0.80874   eval-mlogloss:0.85612
[3]   train-mlogloss:0.65284   eval-mlogloss:0.70651
[4]   train-mlogloss:0.53599   eval-mlogloss:0.59545
[5]   train-mlogloss:0.44736   eval-mlogloss:0.51078
[96]  train-mlogloss:0.00177   eval-mlogloss:0.08159
[97]  train-mlogloss:0.00173   eval-mlogloss:0.08157
[98]  train-mlogloss:0.00169   eval-mlogloss:0.08137
[99]  train-mlogloss:0.00165   eval-mlogloss:0.08122
accuracy score: 0.9757
elapsed time: 128.21487879753113