rpg_asynet

Failed to run "pip install async_sparse_py/"

Open · hongchi533 opened this issue 3 years ago

Dear author,

I ran into this error when I tried to install async_sparse_py/:

Processing ./async_sparse_py
  DEPRECATION: A future pip version will change local packages to be built in-place without first copying to a temporary directory. We recommend you use --use-feature=in-tree-build to test your packages with this new behavior before it becomes the default. pip 21.3 will remove support for this functionality. You can find discussion regarding this at https://github.com/pypa/pip/issues/7555.
Building wheels for collected packages: async-sparse
  Building wheel for async-sparse (setup.py) ... error
  ERROR: Failed building wheel for async-sparse
  Running setup.py clean for async-sparse
Failed to build async-sparse
Installing collected packages: async-sparse
    Running setup.py install for async-sparse ... error
    ERROR: Command errored out with exit status 1:
     /usr/bin/python3 -u -c 'import io, os, sys, setuptools, tokenize; sys.argv[0] = '"'"'/tmp/pip-req-build-9o5kx_9y/setup.py'"'"'; __file__='"'"'/tmp/pip-req-build-9o5kx_9y/setup.py'"'"';f = getattr(tokenize, '"'"'open'"'"', open)(__file__) if os.path.exists(__file__) else io.StringIO('"'"'from setuptools import setup; setup()'"'"');code = f.read().replace('"'"'\r\n'"'"', '"'"'\n'"'"');f.close();exec(compile(code, __file__, '"'"'exec'"'"'))' install --record /tmp/pip-record-ib5zgp17/install-record.txt --single-version-externally-managed --compile --install-headers /usr/local/include/python3.7/async-sparse
    Check the logs for full command output.

Do you know how to solve this problem?

My gcc version is 7.5.0, and I have successfully installed Eigen 3.4 using: git clone https://gitlab.com/libeigen/eigen.git --branch 3.4-rc1

Thank you very much.

hongchi533 · Dec 22 '21
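(Note: the truncated log above does not show the underlying compiler error, but a common cause of this kind of wheel-build failure is that the extension build cannot find the Eigen headers. Below is a minimal, hypothetical sketch of a setuptools-based setup.py that points the build at a locally cloned Eigen tree; the extension name, source list, and compile flags are placeholders and would need to match whatever async_sparse_py/setup.py actually defines.)

# Hypothetical sketch only: names, sources, and flags are placeholders and
# must be adapted to the real async_sparse_py/setup.py.
from setuptools import setup, Extension

EIGEN_INCLUDE = "/path/to/eigen"  # root of the cloned Eigen repository (header-only)

ext = Extension(
    name="async_sparse",              # placeholder extension/module name
    sources=["src/conv2d.cpp"],       # placeholder C++ sources
    include_dirs=[EIGEN_INCLUDE],     # lets the compiler find the Eigen headers
    extra_compile_args=["-O3", "-std=c++14"],
    language="c++",
)

setup(
    name="async-sparse",
    version="0.0.1",
    ext_modules=[ext],
)

Alternatively, since gcc honors the CPATH environment variable as additional include directories, exporting the Eigen path (e.g. CPATH=/path/to/eigen pip install async_sparse_py/) can have the same effect without editing setup.py.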

Have you been able to solve this problem? We have the same issue.

Ldq99 · Jun 24 '22

I made some modifications to the code in conv2d.cpp, and now it runs successfully:

AsynSparseConvolution2D::ReturnType AsynSparseConvolution2D::forward(const Eigen::Ref<const Eigen::MatrixXi> update_location,
                                                                     const Eigen::Ref<const Eigen::MatrixXf> input_feature_map,
                                                                     Eigen::Ref<ActiveMatrix>& active_sites_map,
                                                                     RuleBook& rule_book,
                                                                     bool no_update_locations)
{
if (debug_) std::cout << "init active sites" << std::endl;

Eigen::VectorXi update_location_linear = update_location(Eigen::all,1) + rule_book.W_ * update_location(Eigen::all,0);

if (debug_) std::cout << "init active sites" << std::endl;

// time before rulebook
std::chrono::high_resolution_clock::time_point start, finish, global_start, global_finish;
std::chrono::duration<double> time_span;

start = std::chrono::high_resolution_clock::now();

// global_start = std::chrono::high_resolution_clock::now();
if (debug_) std::cout << "init active sites" << std::endl;

int num_update_locations = update_location.rows();
Eigen::Matrix<bool,-1,1> bool_new_active_site = Eigen::Matrix<bool,-1,1>::Constant(num_update_locations, false);
Eigen::Matrix<bool,-1,1> zero_input_update = Eigen::Matrix<bool,-1,1>::Constant(num_update_locations, false);

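// First layer: derive site activity directly from the feature maps. A row
// whose new input is all zeros is flagged in zero_input_update; a row whose
// previous input was all zeros is flagged as a newly active site.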
if (first_layer_)
{
    zero_input_update = input_feature_map(update_location_linear, Eigen::all).array().abs().rowwise().sum() == 0; 
    bool_new_active_site = old_input_feature_map_(update_location_linear, Eigen::all).array().abs().rowwise().sum() == 0;

    if (debug_)  std::cout << "Zero input update: " << zero_input_update.transpose()<< std::endl;    
    if (debug_)  std::cout << "bool_new_active_site: " << bool_new_active_site.transpose()<< std::endl;    
}

// finish = std::chrono::high_resolution_clock::now();
// time_span = finish - start;
// std::cout << "time before rulebook: " << time_span.count() << std::endl;
// start = std::chrono::high_resolution_clock::now();

Eigen::VectorXi new_update_location_linear;

if (!no_update_locations)
{
    updateRulebooks(bool_new_active_site,
                    zero_input_update,
                    update_location_linear,
                    active_sites_map,
                    new_update_location_linear,
                    rule_book);
 }

Eigen::MatrixXi new_update_location(new_update_location_linear.rows(),2);
new_update_location(Eigen::all, 0) = new_update_location_linear.unaryExpr([&](const int x) { return x / rule_book.W_; });
new_update_location(Eigen::all, 1) = new_update_location_linear.unaryExpr([&](const int x) { return x % rule_book.W_; });


if (debug_)  rule_book.print();

finish = std::chrono::high_resolution_clock::now();
time_span = finish - start;

// std::cout << "time during rulebook: " << time_span.count() << " " << float(time_span.count()) << std::endl; // start = std::chrono::high_resolution_clock::now();

Eigen::MatrixXf output_feature_map = output_feature_map_;

bool first = true;
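// Incremental convolution: for each kernel offset, the change in the input
// features (or the full input at newly active sites) is propagated through
// that offset's weight matrix and accumulated into the output feature map.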
for (int kernel_index=0; kernel_index<filter_volume_; kernel_index++)
{
    int nrules = rule_book.nrules(kernel_index);
    if (nrules == 0)
        continue;

    Eigen::MatrixXf matrix = weights_(kernel_index, Eigen::all).reshaped(nOut_, nIn_);

    std::vector<int> input, output;
    rule_book.getRules(kernel_index, input, output);

    Eigen::MatrixXf delta_feature(nrules, nIn_);
    
    for (int r=0; r<nrules; r++) 
    {
        if (active_sites_map(output[r]) != Site::NEW_ACTIVE)
        {
            delta_feature(r, Eigen::all) = input_feature_map(input[r], Eigen::all) - old_input_feature_map_(input[r], Eigen::all);
        }
        else
        {
            delta_feature(r, Eigen::all) = input_feature_map(input[r], Eigen::all);
        }

        //std::cout << (matrix * delta_feature(r, Eigen::all).transpose()).transpose() << std::endl;
        output_feature_map(output[r], Eigen::all) += matrix * delta_feature(r, Eigen::all).transpose();
        
    }
    //Eigen::MatrixXf update_term = matrix * delta_feature.transpose();
    //std::cout << "A "<< update_term.transpose() << std::endl;
    //std::cout << "B "<< update_term1.transpose() << std::endl;
    

    if (first)
    {
        first = false;
        if (debug_)  std::cout << "Feature delta: " << delta_feature << std::endl;
        if (debug_)  std::cout << "Weights: " << weights_(kernel_index, Eigen::all).reshaped(nOut_, nIn_) << std::endl;
    }           

    if (debug_)  std::cout << "output_feature_map: " << output_feature_map.sum() << std::endl;
}

// finish = std::chrono::high_resolution_clock::now();
// time_span = finish - start;
// std::cout << "time during update: " << time_span.count() << std::endl;
// start = std::chrono::high_resolution_clock::now();

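// Post-processing: zero the output rows of sites that just became inactive,
// and add the bias once to sites that just became active.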
for (int i=0; i<active_sites_map.rows(); i++)
{
    if (active_sites_map(i) == Site::NEW_INACTIVE)
    {
        for (int j=0; j<nOut_; j++)
            output_feature_map(i,j) = 0;
    }
    if (use_bias_ && active_sites_map(i) == Site::NEW_ACTIVE)
        output_feature_map(i, Eigen::all) += bias_;
}

if (debug_)  std::cout << "output and input feature_map after bias: " << output_feature_map.sum() << " " << input_feature_map.sum() << std::endl;

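// Cache the current input and output feature maps as the reference state for
// the next asynchronous call.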
old_input_feature_map_ = input_feature_map;
output_feature_map_ = output_feature_map;

// finish = std::chrono::high_resolution_clock::now();
// time_span = finish - start;
// std::cout << "time after rulebook: " << time_span.count() << std::endl;

// global_finish = std::chrono::high_resolution_clock::now();
// time_span = global_finish - global_start;
// std::cout << "global time: " << time_span.count() << std::endl;

if (debug_)  std::cout << "output and input feature_map after bias: " << new_update_location.cols() << " " << new_update_location.rows() << std::endl;
if (debug_)  std::cout << new_update_location << std::endl;

// std::cout << "Time for Rulebook: " << output_feature_map(0, 0) << std::endl; ActiveMatrix active_sites_copy = active_sites_map; return std::make_tuple(new_update_location, output_feature_map, active_sites_copy); }

Noryway · Oct 12 '24