
[Errno 9] Bad file descriptor

Open · sakalouski opened this issue 6 years ago • 2 comments

Hi,

I'm running the code from the command line. I tried using JSON (use_json=True), which did not help. However, if I comment out self._load_data(), things seem to work. The question is: does commenting it out break the optimization process?

Thank You!

optimizer.py:

def reload(self):
    """Completely reload the optimizer."""
    self._old_params = {}
    self._examples = []
    # self._load_data()  <-- COMMENTED OUT
    self.run(alg=None)  # backend is set to serving by default

Traceback (most recent call last):
  File "test.py", line 26, in <module>
    bb = BlackBoxOptimizer(file=file, use_json=True)
  File "/home/b7066789/.local/lib/python3.6/site-packages/bbopt/optimizer.py", line 72, in __init__
    self.reload()
  File "/home/b7066789/.local/lib/python3.6/site-packages/bbopt/optimizer.py", line 78, in reload
    self._load_data()
  File "/home/b7066789/.local/lib/python3.6/site-packages/bbopt/optimizer.py", line 192, in _load_data
    with Lock(self.data_file, "rb", timeout=lock_timeout) as df:
  File "/home/b7066789/.local/lib/python3.6/site-packages/portalocker/utils.py", line 197, in __enter__
    return self.acquire()
  File "/home/b7066789/.local/lib/python3.6/site-packages/portalocker/utils.py", line 157, in acquire
    raise exceptions.LockException(exception)
portalocker.exceptions.LockException: [Errno 9] Bad file descriptor

System: CentOS 7

pip freeze absl-py==0.2.2 anndata==0.6.6 args==0.1.0 asdf==2.1.0 ast2vec==0.3.8a0 astetik==1.9.5 astor==0.6.2 astropy==3.0.4 atomicwrites==1.2.1 attrs==18.2.0 backcall==0.1.0 bblfsh==2.9.13 bbopt==0.4.1 beautifulsoup4==4.6.3 biopython==1.72 bleach==1.5.0 boto==2.49.0 boto3==1.9.86 botocore==1.12.86 bs4==0.0.1 bz2file==0.98 cachetools==2.1.0 certifi==2018.8.24 chances==0.1.4 chardet==3.0.4 Click==7.0 clint==0.5.1 colorama==0.3.9 cycler==0.10.0 cymem==2.0.2 cytoolz==0.9.0.1 datasketch==1.4.1 decorator==4.3.0 dill==0.2.9 docker==3.5.0 docker-pycreds==0.3.0 docutils==0.14 dulwich==0.19.6 EasyProcess==0.2.3 en-core-web-sm==2.0.0 entrypoints==0.2.3 fa2==0.2 flake8==3.6.0 flake8-polyfill==1.0.2 flatbuffers==1.10 funcsigs==1.0.2 funcy==1.11 future==0.17.1 gast==0.2.0 gensim==3.7.0 geonamescache==1.0.1 google-api-core==1.4.0 google-auth==1.5.1 google-auth-httplib2==0.0.3 google-cloud-core==0.25.0 google-cloud-storage==1.2.0 google-resumable-media==0.3.1 googleapis-common-protos==1.5.3 GPUtil==1.3.0 graphviz==0.10.1 grpcio==1.10.0 grpcio-tools==1.10.0 h5py==2.7.1 html5lib==0.9999999 HTMLParser==0.0.2 HTSeq==0.10.0 httplib2==0.11.3 humanize==0.5.1 hyperas==0.4 hyperopt==0.1.1 idna==2.7 igraph==0.1.11 imageio==2.4.1 ipykernel==4.8.2 ipython==6.4.0 ipython-genutils==0.2.0 ipywidgets==7.4.2 jedi==0.12.0 Jinja2==2.10 jmespath==0.9.3 joblib==0.11 jsonschema==2.6.0 jupyter==1.0.0 jupyter-client==5.2.3 jupyter-console==5.2.0 jupyter-core==4.4.0 Keras==2.2.4 Keras-Applications==1.0.7 Keras-Preprocessing==1.0.9 kerasplotlib==0.1.4 kiwisolver==1.0.1 langdetect==1.0.7 lightgbm==2.2.2 livelossplot==0.2.2 llvmlite==0.23.0 louvain==0.6.1 lxml==4.2.1 lz4==2.1.0 mando==0.6.4 Markdown==2.6.11 MarkupSafe==1.0 matplotlib==3.0.2 mccabe==0.6.1 mistune==0.8.3 mnnpy==0.1.9.4 modelforge==0.7.0 modin==0.2.5 more-itertools==4.3.0 msgpack==0.5.6 msgpack-numpy==0.4.3.2 murmurhash==1.0.1 natsort==5.3.1 nbconvert==5.3.1 nbformat==4.4.0 netifaces==0.10.7 networkx==1.11 nltk==3.4 notebook==5.6.0 numba==0.38.0 numexpr==2.6.5 numpy==1.14.5 pandas==0.23.4 pandocfilters==1.4.2 parquet==1.2 parso==0.2.0 patool==1.12 patsy==0.5.0 pexpect==4.5.0 pickleshare==0.7.4 Pillow==5.1.0 plac==0.9.6 pluggy==0.7.1 ply==3.11 portalocker==1.4.0 pqdict==1.0.0 preshed==2.0.1 prometheus-client==0.3.1 prompt-toolkit==1.0.15 protobuf==3.6.1 psutil==5.4.8 ptyprocess==0.5.2 py==1.6.0 py4j==0.10.7 pyasn1==0.4.4 pyasn1-modules==0.2.2 pycodestyle==2.4.0 pyflakes==2.0.0 Pygments==2.2.0 pyLDAvis==2.1.2 pymongo==3.7.2 pyparsing==2.2.0 pysam==0.14.1 pyspark==2.4.0 PyStemmer==1.3.0 pytest==3.8.2 pytextrank==1.1.0 python-dateutil==2.7.3 python-igraph==0.7.1.post6 python-libsbml==5.17.0 python-pptx==0.6.9 python-snappy==0.5.3 pytz==2018.4 pyunpack==0.1.2 PyWavelets==1.0.1 PyYAML==3.12 pyzmq==17.1.2 qtconsole==4.4.1 radon==2.4.0 ray==0.6.0 redis==3.0.1 regex==2018.1.10 requests==2.20.1 rsa==4.0 s3transfer==0.1.13 scanpy==1.2.2 scikit-image==0.13.1 scikit-learn==0.20.1 scikit-optimize==0.5.2 scipy==1.1.0 seaborn==0.8.1 selectolax==0.1.8 semantic-version==2.6.0 Send2Trash==1.5.0 sh==1.12.14 shap==0.28.5 simplegeneric==0.8.1 simplejson==3.16.0 singledispatch==3.4.0.3 six==1.11.0 sklearn==0.0 smart-open==1.8.0 sourced-engine==0.6.4 sourced-ml==0.6.3 spacy==2.0.18 statistics==1.0.3.5 statsmodels==0.9.0 stocal==1.2 summa==1.2.0 tables==3.4.3 talos==0.4.8 tensorboard==1.12.2 tensorflow==1.12.0 termcolor==1.1.0 terminado==0.8.1 testpath==0.3.1 Theano==1.0.2 thinc==6.12.1 thriftpy==0.3.9 tmsc==0.1.5a0 toolz==0.9.0 torch==0.4.1 torchvision==0.2.1 tornado==5.1 tqdm==4.31.1 
traitlets==4.3.2 typing==3.6.6 ujson==1.35 umap==0.1.1 umap-learn==0.2.3 urllib3==1.23 wcwidth==0.1.7 websocket-client==0.53.0 Werkzeug==0.14.1 widgetsnbextension==3.4.0 wrangle==0.5.1 wrapt==1.10.11 xgboost==0.72 XlsxWriter==1.0.5 xmlutils==1.4

The code:

X_train, Y_train, X_val, Y_val = data()

def run_trial():
    """Run one trial of hyperparameter optimization."""
    # Start BBopt:
    bb.run()
    input_shape = 8208

    h_n_num = bb.randint('num_neur',5,1000)
    act = bb.choice('activ_func',['selu','relu','elu'])
    num_lay = bb.randint('num_hidden_layers',0,10)
    dout = bb.uniform("dropout", 0, 1)
    lr = bb.uniform("init_learn_rate", 1e-5, 0.1)
    bsize = bb.choice('batch_size',[8,16,32,64,128])

    # Create model:
    a = Input(shape=(input_shape,))
    b = Dense(h_n_num,activation=act)(a)
    b = Dropout(dout)(b)
    for l in range(num_lay):
        b = Dense(h_n_num,activation=act)(b)
        b = Dropout(dout)(b)
    output = Dense(1,activation='linear',name='out')(b)

    model = keras.models.Model(inputs=a, outputs=output)
    opt = Nadam(lr)
    model.compile(optimizer = opt, loss=mse)

    # Train model:
    history = model.fit(x=X_train[:-70], y=Y_train[:-70],batch_size=bsize,epochs=1,
                        validation_data=(X_train[-70:],Y_train[-70:]),
                        verbose=0,
                        validation_split = 0.4,
                        callbacks=[EarlyStopping(patience=30),
                                   ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25,verbose=0)],
                        shuffle=False)

    train_loss = history.history["loss"][-1]
    val_loss = history.history["val_loss"][-1]

    bb.remember({
        "train_loss": train_loss,
        "val_loss": val_loss,
    })

    bb.minimize(val_loss)

num_trials = 5
result = []

for i in tqdm(range(num_trials)):
    run_trial()
    result.append(bb.get_current_run())
    if len(result)>1:
        [i['memo'].update(i['values']) for i in result]
    temp = [i['memo'] for i in result]
    pd.DataFrame(temp).to_csv('./transfer_learning/DL_optimization_reports/patch_weekly_5000_trials.csv')

sakalouski · Feb 22 '19 10:02

@sakalouski I can't seem to replicate your error, but I suspect that has to do with the operating system and Python version you're running, since the error occurs when BBopt tries to lock the data file (which is a necessary step; BBopt won't work if you comment out self._load_data()). It's also possible that the code you're actually running differs from the version you posted here, which is what I ran. This is the code I ran:

import keras
from keras.optimizers import Nadam
from keras.layers import Input, Dense, Dropout
from keras.callbacks import EarlyStopping, ReduceLROnPlateau
from keras.metrics import mse
from sklearn import datasets
import pandas as pd
from tqdm import tqdm
from bbopt import BlackBoxOptimizer

bb = BlackBoxOptimizer(__file__)

iris = datasets.load_iris()

X = iris.data
Y = iris.target

train_split = int(.6*len(X))

X_train, X_val = X[:train_split], X[train_split:]
Y_train, Y_val = Y[:train_split], Y[train_split:]


def run_trial():
    """Run one trial of hyperparameter optimization."""
    # Start BBopt:
    bb.run()
    input_shape = 4

    h_n_num = bb.randint('num_neur',5,1000)
    act = bb.choice('activ_func',['selu','relu','elu'])
    num_lay = bb.randint('num_hidden_layers',0,10)
    dout = bb.uniform("dropout", 0, 1)
    lr = bb.uniform("init_learn_rate", 1e-5, 0.1)
    bsize = bb.choice('batch_size',[8,16,32,64,128])

    # Create model:
    a = Input(shape=(input_shape,))
    b = Dense(h_n_num,activation=act)(a)
    b = Dropout(dout)(b)
    for l in range(num_lay):
        b = Dense(h_n_num,activation=act)(b)
        b = Dropout(dout)(b)
    output = Dense(1,activation='linear',name='out')(b)

    model = keras.models.Model(inputs=a, outputs=output)
    opt = Nadam(lr)
    model.compile(optimizer = opt, loss=mse)

    # Train model:
    history = model.fit(x=X_train[:-70], y=Y_train[:-70],batch_size=bsize,epochs=1,
                              validation_data=(X_train[-70:],Y_train[-70:]),
                                 verbose=0,
                                 validation_split = 0.4,
                                 callbacks=[EarlyStopping(patience=30),
                                            ReduceLROnPlateau(monitor='val_loss', factor=0.5, patience=25,verbose=0)],
                                 shuffle=False)

    train_loss = history.history["loss"][-1]
    val_loss = history.history["val_loss"][-1]

    bb.remember({
        "train_loss": train_loss,
        "val_loss": val_loss,
    })

    bb.minimize(val_loss)

num_trials = 5
result = []

for i in tqdm(range(num_trials)):
    run_trial()
    result.append(bb.get_current_run())
    if len(result)>1:
        [i['memo'].update(i['values']) for i in result]
    temp = [i['memo'] for i in result]
    pd.DataFrame(temp).to_csv('./patch_weekly_5000_trials.csv')

Do you get an error when you run this code?
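
One more thing worth checking is whether file locking works on your filesystem at all, since [Errno 9] from a lock call often means the underlying fcntl call rejected the file descriptor (network filesystems like NFS are a common culprit). Here's a minimal sketch, independent of BBopt, that mirrors the Lock call from your traceback (the lock_test.bbopt filename is just a placeholder):

from portalocker.exceptions import LockException
import portalocker

# Lock with mode "rb" expects the file to already exist, so create it first:
with open("lock_test.bbopt", "wb") as f:
    f.write(b"{}")

try:
    # Mirrors the locking done in bbopt/optimizer.py's _load_data:
    with portalocker.Lock("lock_test.bbopt", "rb", timeout=5) as df:
        print("lock acquired, contents:", df.read())
except LockException as err:
    print("locking failed:", err)

If this also raises [Errno 9] Bad file descriptor, the problem is between portalocker and your filesystem rather than in BBopt itself.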

evhub · Feb 22 '19 22:02

@sakalouski Also, if you could try updating your bbopt to 0.4.2 (pip install -U bbopt) and let me know what error message you get then, that would be greatly appreciated.
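
Once it's upgraded, you can confirm which version is actually being imported like so (pkg_resources ships with setuptools, so nothing extra to install):

import bbopt
import pkg_resources

print(bbopt.__file__)  # shows which installation gets imported
print(pkg_resources.get_distribution("bbopt").version)  # should now report 0.4.2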

evhub · Feb 22 '19 22:02