FATE icon indicating copy to clipboard operation
FATE copied to clipboard

Why doesn't Homo Logistic Regression Multi-Class output any data on the host side?

Open amyseoj1 opened this issue 1 year ago • 1 comments

Hello,

When I run Homo-LR, there is neither an evaluation result nor a data output on the host side. You can reproduce this by running pipeline-homo-lr-one-vs-all.py:

import argparse import json

from pipeline.backend.pipeline import PipeLine from pipeline.component import DataTransform from pipeline.component import Evaluation from pipeline.component import HomoLR from pipeline.component import Reader from pipeline.interface import Data from pipeline.interface import Model from pipeline.utils.tools import load_job_config

def prettify(response, verbose=True):
    """Pretty-print *response* as indented JSON (when *verbose*) and return it unchanged.

    :param response: any JSON-serializable object (e.g. a component summary dict)
    :param verbose: if True, dump the object to stdout followed by a blank line
    :return: the *response* object, untouched, so calls can be chained
    """
    if verbose:
        rendered = json.dumps(response, indent=4, ensure_ascii=False)
        print(rendered)
        print()
    return response

def main(config="../../config.yaml", namespace=""):
    """Build and fit a one-vs-all Homo-LR pipeline on the vehicle_scale dataset.

    :param config: path to a job config YAML, or an already-loaded config object
    :param namespace: suffix appended to the data namespace (e.g. for parallel runs)
    :return: the fitted PipeLine instance
    """
    # Accept either a path or a pre-loaded config object.
    if isinstance(config, str):
        config = load_job_config(config)
    parties = config.parties
    guest = parties.guest[0]
    hosts = parties.host[0]  # NOTE(review): only the first host is used — confirm intended
    arbiter = parties.arbiter[0]
    guest_train_data = {"name": "vehicle_scale_homo_guest",
                        "namespace": f"experiment{namespace}"}
    host_train_data = {"name": "vehicle_scale_homo_host",
                       "namespace": f"experiment{namespace}"}

    # Initialize the pipeline: guest initiates the job; guest, host, arbiter participate.
    pipeline = PipeLine()
    pipeline.set_initiator(role='guest', party_id=guest)
    pipeline.set_roles(guest=guest, host=hosts, arbiter=arbiter)

    # Reader: each party reads its own local table.
    reader_0 = Reader(name="reader_0")
    reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
    reader_0.get_party_instance(role='host', party_id=hosts).component_param(table=host_train_data)

    # Dense, labeled data transform shared by all parties.
    data_transform_0 = DataTransform(name="data_transform_0", output_format='dense', with_label=True)

    pipeline.add_component(reader_0)
    pipeline.add_component(data_transform_0, data=Data(data=reader_0.output.data))

    # Homo-LR hyperparameters (full-batch SGD, zero init, no CV,
    # checkpoint + early-stopping callbacks).
    lr_param = {
        "penalty": "L2",
        "optimizer": "sgd",
        "tol": 1e-05,
        "alpha": 0.01,
        "early_stop": "diff",
        "batch_size": -1,
        "learning_rate": 0.15,
        "decay": 1,
        "decay_sqrt": True,
        "init_param": {
            "init_method": "zeros"
        },
        "cv_param": {
            "n_splits": 4,
            "shuffle": True,
            "random_seed": 33,
            "need_cv": False
        },
        "callback_param": {
            "callbacks": ["ModelCheckpoint", "EarlyStopping"]
        }
    }

    # homo_lr_0 trains (a single iteration); homo_lr_1 reuses its model for prediction.
    homo_lr_0 = HomoLR(name="homo_lr_0", max_iter=1, **lr_param)
    homo_lr_1 = HomoLR(name="homo_lr_1")

    pipeline.add_component(homo_lr_0, data=Data(train_data=data_transform_0.output.data))
    pipeline.add_component(homo_lr_1, data=Data(test_data=data_transform_0.output.data),
                           model=Model(model=homo_lr_0.output.model))

    # Evaluate both the training output and the prediction output (multi-class).
    evaluation_0 = Evaluation(name="evaluation_0", eval_type="multi")
    pipeline.add_component(evaluation_0, data=Data(data=[homo_lr_0.output.data,
                                                         homo_lr_1.output.data]))

    pipeline.compile()

    # Fit the model, then print the evaluation summary.
    pipeline.fit()
    prettify(pipeline.get_component("evaluation_0").get_summary())
    return pipeline

# Script entry point. The pasted snippet had `if name == "main":` — markdown
# rendering stripped the double underscores, which would raise a NameError and
# never run the demo; restore the standard dunder guard.
if __name__ == "__main__":
    parser = argparse.ArgumentParser("PIPELINE DEMO")
    parser.add_argument("-config", type=str, help="config file")
    args = parser.parse_args()
    if args.config is not None:
        main(args.config)
    else:
        main()

amyseoj1 avatar Aug 27 '23 18:08 amyseoj1

I will check this problem

talkingwallace avatar Aug 31 '23 07:08 talkingwallace

This issue was closed because it had been inactive for 1 day since being marked as stale. If this issue is still relevant or if there is new information, please feel free to update or reopen it.

github-actions[bot] avatar Jul 20 '24 02:07 github-actions[bot]