PGL
Warning: PaddlePaddle catches a failure signal, it may not work properly
W0702 12:33:15.146848 3068 init.cc:218] You could check whether you killed PaddlePaddle thread/process accidentally or report the case to PaddlePaddle
W0702 12:33:15.146860 3068 init.cc:221] The detail failure signal is:
W0702 12:33:15.146873 3068 init.cc:224] *** Aborted at 1593664395 (unix time) try "date -d @1593664395" if you are using GNU date ***
W0702 12:33:15.149258 3068 init.cc:224] PC: @ 0x0 (unknown)
W0702 12:33:15.150092 3068 init.cc:224] *** SIGSEGV (@0x558e0a54bca0) received by PID 3068 (TID 0x7f4da11da740) from PID 173325472; stack trace: ***
W0702 12:33:15.152169 3068 init.cc:224] @ 0x7f4da0dc45f0 (unknown)
W0702 12:33:15.153133 3068 init.cc:224] @ 0x7f4d5e96b892 (unknown)
W0702 12:33:15.154075 3068 init.cc:224] @ 0x7f4d5e96da6e (unknown)
W0702 12:33:15.154742 3068 init.cc:224] @ 0x558e04472c94 _PyMethodDef_RawFastCallKeywords
W0702 12:33:15.155367 3068 init.cc:224] @ 0x558e04472db1 _PyCFunction_FastCallKeywords
W0702 12:33:15.155993 3068 init.cc:224] @ 0x558e044de5be _PyEval_EvalFrameDefault
W0702 12:33:15.156579 3068 init.cc:224] @ 0x558e044222b9 _PyEval_EvalCodeWithName
W0702 12:33:15.157160 3068 init.cc:224] @ 0x558e04423610 _PyFunction_FastCallDict
W0702 12:33:15.157730 3068 init.cc:224] @ 0x558e04441b93 _PyObject_Call_Prepend
W0702 12:33:15.158006 3068 init.cc:224] @ 0x558e044790aa slot_tp_init
W0702 12:33:15.158625 3068 init.cc:224] @ 0x558e04479ca8 _PyObject_FastCallKeywords
W0702 12:33:15.159250 3068 init.cc:224] @ 0x558e044ded78 _PyEval_EvalFrameDefault
W0702 12:33:15.159832 3068 init.cc:224] @ 0x558e0442331b _PyFunction_FastCallDict
W0702 12:33:15.160137 3068 init.cc:224] @ 0x558e04484dc2 property_descr_get
W0702 12:33:15.160692 3068 init.cc:224] @ 0x558e044369f1 _PyObject_GenericGetAttrWithDict
W0702 12:33:15.161314 3068 init.cc:224] @ 0x558e044da0ba _PyEval_EvalFrameDefault
W0702 12:33:15.161890 3068 init.cc:224] @ 0x558e044222b9 _PyEval_EvalCodeWithName
W0702 12:33:15.162429 3068 init.cc:224] @ 0x558e04472435 _PyFunction_FastCallKeywords
W0702 12:33:15.163048 3068 init.cc:224] @ 0x558e044d9e70 _PyEval_EvalFrameDefault
W0702 12:33:15.163589 3068 init.cc:224] @ 0x558e0447220b _PyFunction_FastCallKeywords
W0702 12:33:15.164211 3068 init.cc:224] @ 0x558e044d9e70 _PyEval_EvalFrameDefault
W0702 12:33:15.164783 3068 init.cc:224] @ 0x558e044222b9 _PyEval_EvalCodeWithName
W0702 12:33:15.165321 3068 init.cc:224] @ 0x558e04472497 _PyFunction_FastCallKeywords
W0702 12:33:15.165946 3068 init.cc:224] @ 0x558e044dacba _PyEval_EvalFrameDefault
W0702 12:33:15.166486 3068 init.cc:224] @ 0x558e0447220b _PyFunction_FastCallKeywords
W0702 12:33:15.167104 3068 init.cc:224] @ 0x558e044d9be6 _PyEval_EvalFrameDefault
W0702 12:33:15.167680 3068 init.cc:224] @ 0x558e044222b9 _PyEval_EvalCodeWithName
W0702 12:33:15.168279 3068 init.cc:224] @ 0x558e044231d4 PyEval_EvalCodeEx
W0702 12:33:15.168853 3068 init.cc:224] @ 0x558e044231fc PyEval_EvalCode
W0702 12:33:15.169260 3068 init.cc:224] @ 0x558e04538f44 run_mod
W0702 12:33:15.169814 3068 init.cc:224] @ 0x558e045432b1 PyRun_FileExFlags
W0702 12:33:15.170437 3068 init.cc:224] @ 0x558e045434a3 PyRun_SimpleFileExFlags
Segmentation fault
Can you provide more details on how you run the code?
cd PGL/examples/line
python line.py
@Lxhnnn what is your running environment, e.g. the versions of paddle and pgl, and Linux or Windows?
I met a similar problem.
paddle version: 1.6.3 (GPU) and 1.8.0 (GPU) (I tried both versions; both report the same error.)
OS: Linux, Ubuntu 16.04 (AI Studio)
The following is the code that reports the error.
import pgl
import pandas as pd
links = pd.read_csv('ml-latest-small/links.csv')
movies = pd.read_csv('ml-latest-small/movies.csv')
ratings = pd.read_csv('ml-latest-small/ratings.csv')
tags = pd.read_csv('ml-latest-small/tags.csv')
userIDs = ratings['userId'].unique().tolist()
itemIDs = ratings['movieId'].unique().tolist()
print("num of userIDs: {}".format(len(userIDs)))
print("num of itemIDs: {}".format(len(itemIDs)))
# Shift user IDs to start from 0; item IDs are placed after the largest user ID.
minUserID = min(userIDs)
transferred_userIDs = [c - minUserID for c in userIDs]
maxUserID = max(transferred_userIDs)
userid_map = dict(zip(userIDs, transferred_userIDs))
minItemID = min(itemIDs)
transferred_itemIDs = [c - minItemID for c in itemIDs]
transferred_itemIDs = [c + maxUserID + 1 for c in transferred_itemIDs]
itemid_map = dict(zip(itemIDs, transferred_itemIDs))
# Bucket each rating into one of three edge types.
edges = {'like': [], 'neutral': [], 'dislike': []}
for index, row in ratings.iterrows():
    userId = int(row['userId'])
    movieId = int(row['movieId'])
    rating = int(row['rating'])
    timestamp = int(row['timestamp'])
    # print(userId, movieId, rating, timestamp)
    if rating > 3:
        edges['like'].append((userid_map[userId], itemid_map[movieId]))
    elif rating == 3:
        edges['neutral'].append((userid_map[userId], itemid_map[movieId]))
    else:
        edges['dislike'].append((userid_map[userId], itemid_map[movieId]))
node_types = []
for node in userIDs:
    node_types.append((userid_map[node], 'user'))
for node in itemIDs:
    node_types.append((itemid_map[node], 'movie'))
print("build nodes finished.")
import numpy as np
num_nodes = len(node_types)
node_features = {'features': np.random.randn(num_nodes, 8).astype('float32')}
edge_num_list = []
for edge_type in edges:
    edge_num_list.append(len(edges[edge_type]))
edge_features = {
    'like': {'h': np.random.randn(edge_num_list[0], 4)},
    'neutral': {'h': np.random.randn(edge_num_list[1], 4)},
    'dislike': {'h': np.random.randn(edge_num_list[2], 4)}
}
import paddle.fluid as fluid
import paddle.fluid.layers as fl
from pgl import heter_graph
from pgl import heter_graph_wrapper
g = heter_graph.HeterGraph(
    num_nodes=num_nodes,
    edges=edges,
    node_types=node_types,
    node_feat=node_features,
    edge_feat=edge_features
)
place = fluid.CPUPlace()
gw = heter_graph_wrapper.HeterGraphWrapper(
    name='heter_graph',
    place=place,
    edge_types=g.edge_types_info(),
    node_feat=g.node_feat_info(),
    edge_feat=g.edge_feat_info()
)
Please make sure that the max node ID is less than "num_nodes".
I guess that your node IDs are not consecutive: in MovieLens, the raw movieId values are sparse, so subtracting minItemID still leaves gaps, and the largest remapped ID can end up greater than num_nodes (which is just the count of unique users plus unique movies).
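For reference, here is a minimal sketch (my own suggestion, not code from this thread or from PGL) of a remapping that guarantees consecutive IDs: enumerate the unique values instead of offsetting by the minimum, so the invariant max node ID < num_nodes holds by construction.

import pandas as pd

ratings = pd.read_csv('ml-latest-small/ratings.csv')

userIDs = ratings['userId'].unique().tolist()
itemIDs = ratings['movieId'].unique().tolist()

# Enumerate the unique IDs; this yields a gap-free range no matter how
# sparse the raw userId/movieId values are.
userid_map = {o_id: i for i, o_id in enumerate(userIDs)}                 # users: [0, n_users)
itemid_map = {o_id: len(userIDs) + i for i, o_id in enumerate(itemIDs)}  # movies: [n_users, num_nodes)

num_nodes = len(userIDs) + len(itemIDs)
assert max(itemid_map.values()) < num_nodes  # the invariant HeterGraph needs

With these maps substituted into the code above, building the HeterGraph should no longer hit the out-of-range node IDs that trigger the segfault.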