Skip to content

Instantly share code, notes, and snippets.

View erdogant's full-sized avatar
🏠
Working from home

Erdogan erdogant

🏠
Working from home
View GitHub Profile
# Inspect (and optionally edit) the per-edge styling dataframe before plotting.
# NOTE(review): `d3` is presumably an initialized D3Blocks instance from an
# earlier cell — confirm against the surrounding gist.
d3.edge_properties
# Example of the dataframe contents (one row per point; columns include
# label, x/y coordinates, stroke color, opacity, and the tooltip text):
# label x y ... stroke opacity tooltip
# 0 acc 0.796433 0.745925 ... #000000 0.8 acc <br /> Survival: 44.5
# 1 acc 0.795550 0.739818 ... #000000 0.8 acc <br /> Survival: 55.0
# 2 acc 0.793272 0.739995 ... #000000 0.8 acc <br /> Survival: 63.8
# 3 acc 0.803293 0.747982 ... #000000 0.8 acc <br /> Survival: 11.9
# 4 acc 0.793152 0.725707 ... #000000 0.8 acc <br /> Survival: 79.7
# ... ... ... ... ... ... ...
# Bring in the D3Blocks plotting library.
from d3blocks import D3Blocks
# Create an instance with the surrounding frame enabled.
d3 = D3Blocks(frame=True)
# Load the bundled cancer example dataset into a dataframe.
df = d3.import_example('cancer')
# Build one tooltip string per row: the label plus the survival months,
# trimmed to the first four characters of their string form.
survival_text = df['survival_months'].astype(str).str[0:4].values
tooltip = df['labels'].values + ' <br /> Survival: ' + survival_text
# Create interactive scatter plot
from d3blocks import D3Blocks
# Initialize
d3 = D3Blocks()
# Make scatter
# NOTE(review): this call is truncated in the page capture — the argument
# list continues (x1/y1 pairs, tooltip, etc.) and the closing paren is
# missing here. `map_pca` / `map_tsne` are presumably 2-D embedding
# arrays defined in an earlier cell — confirm against the original gist.
d3.scatter(map_pca[:,0],
map_pca[:,1],
x1=map_tsne[:,0],
# Load the D3Blocks library.
from d3blocks import D3Blocks
# Build a fresh D3Blocks instance.
d3 = D3Blocks()
# Render the text 'D3Blocks' as an interactive particle animation;
# collision controls particle repulsion, spacing the sampling density,
# and figsize the output canvas in pixels.
canvas = [1200, 500]
d3.particles('D3Blocks', collision=0.05, spacing=10, figsize=canvas)
@erdogant
erdogant / d3blocks_violin.py
Last active February 4, 2023 23:06
d3blocks
# Load the D3Blocks library.
from d3blocks import D3Blocks
# Build a fresh D3Blocks instance.
d3 = D3Blocks()
# Fetch the bundled cancer example dataset.
df = d3.import_example('cancer')
# Tooltip setup follows in the full gist (truncated in this capture).
@erdogant
erdogant / hgboost_save_load.py
Last active November 5, 2022 16:15
hgboost
# Save the fitted model to disk (overwrite any existing file at that path).
status = hgb.save(filepath='hgboost_model.pkl', overwrite=True)
# Expected console output:
# [pypickle] Pickle file saved: [hgboost_model.pkl]
# [hgboost] >Saving.. True

# Load the model back.
# BUGFIX: the original read `From hgboost import hgboost`; a capitalized
# `From` is a SyntaxError in Python — the keyword must be lowercase.
from hgboost import hgboost  # Import library when using a fresh start
hgb = hgboost()  # Initialize hgboost
results = hgb.load(filepath='hgboost_model.pkl')  # Load the pickle file with model parameters and trained model.
# Expected console output:
# [pypickle] Pickle file loaded: [hgboost_model.pkl]
##########################################
# Import Titanic dataset and preprocessing.
##########################################
# NOTE(review): `hgb` is presumably an initialized hgboost instance from an
# earlier cell — confirm against the original gist.
df = hgb.import_example(data='titanic')
print(df)
# First rows of the expected output:
# PassengerId Survived Pclass ... Fare Cabin Embarked
# 0 1 0 3 ... 7.2500 NaN S
# 1 2 1 1 ... 71.2833 C85 C
# Fit an XGBoost classifier via hyperparameter search.
# NOTE(review): `X` and `y` are defined in an earlier cell of the gist
# (presumably features/target from the Titanic dataframe) — confirm there.
results = hgb.xgboost(X, y, pos_label=1, eval_metric='auc')
# Alternative boosters exposed by the same API:
# results = hgb.catboost(X, y, pos_label=1, eval_metric='auc')
# results = hgb.lightboost(X, y, pos_label=1, eval_metric='auc')
# Expected console output during fitting:
# [hgboost] >Start hgboost classification.
# [hgboost] >Collecting xgb_clf parameters.
# [hgboost] >Correct for unbalanced classes using [scale_pos_weight]..
# [hgboost] >[13] hyperparameters in gridsearch space. Used loss function: [auc].
# ----------------------------------------------------
# Load the Titanic example dataset for preprocessing.
# ----------------------------------------------------
df = hgb.import_example(data='titanic')
# Show the raw dataframe before any cleaning.
print(df)
# First rows of the expected output:
# PassengerId Survived Pclass ... Fare Cabin Embarked
# 0 1 0 3 ... 7.2500 NaN S
# 1 2 1 1 ... 71.2833 C85 C
# Import the library
from hgboost import hgboost
# Initialize library.
# NOTE(review): this constructor call is truncated in the page capture —
# further keyword arguments and the closing paren are missing here.
hgb = hgboost(
max_eval=250, # Search space is based on the number of evaluations.
threshold=0.5, # Classification threshold. In case of two-class model this is 0.5.
cv=5, # k-folds cross-validation.
test_size=0.2, # Percentage split for the testset.
val_size=0.2, # Percentage split for the validationset.