37,720
社区成员
发帖
与我相关
我的任务
分享
def load_for_dataNname(train_path='/home/data/train.txt',
                       val_path='/home/data/val.txt',
                       batch_size=64):
    """Endlessly yield shuffled (features, names) batches from two pickled dicts.

    Each pickle file is expected to hold a dict mapping name -> feature
    vector (assumption based on usage here — confirm against the writers
    of these files). The generator cycles over both dicts forever, which
    suits Keras-style ``fit_generator`` consumers.

    Args:
        train_path: pickle file with the first name->feature dict.
        val_path: pickle file with the second name->feature dict.
        batch_size: number of items per yielded batch.

    Yields:
        tuple: ``(np.ndarray, list)`` — batch features and the matching
        names, shuffled with the same permutation so pairing is preserved.
    """
    with open(train_path, 'rb') as f:
        dict_1 = pickle.load(f, encoding='iso-8859-1')
    with open(val_path, 'rb') as fin:
        dict_2 = pickle.load(fin, encoding='iso-8859-1')
    # Private RNG (seeded for reproducibility). The original re-seeded the
    # *global* random module before every shuffle, clobbering global RNG
    # state for the rest of the program and reusing one fixed permutation
    # for every batch.
    rng = random.Random(1)
    x = []
    name_list = []
    dicts_list = [dict_1, dict_2]
    while True:
        for feats in dicts_list:
            for name in feats:
                x.append(feats[name])
                name_list.append(name)
                if len(x) == batch_size:
                    # One index permutation applied to both lists keeps
                    # features aligned with their names (the original
                    # shuffled each list separately, relying on identical
                    # re-seeding to stay in sync).
                    order = list(range(batch_size))
                    rng.shuffle(order)
                    yield (np.array([x[i] for i in order]),
                           [name_list[i] for i in order])
                    x = []
                    name_list = []
if __name__ == '__main__':
    # NOTE(review): Input/Dense/Model are Keras names and are not imported
    # anywhere in this file — add `from keras.layers import Input, Dense`
    # and `from keras.models import Model` (or tf.keras equivalents).
    # Build the model ONCE, outside any loop — the original rebuilt the
    # layers for every batch it pulled from the generator.
    inputs = Input(shape=(10,))  # was the builtin `input(...)`: a bug
    hidden = Dense(20, activation='relu')(inputs)
    outputs = Dense(100, activation='softmax')(hidden)
    model = Model(inputs=inputs, outputs=outputs)  # `model` was never defined
    model.compile(optimizer='adam', loss='categorical_crossentropy')
    # fit_generator expects the generator object itself, not one unpacked
    # numpy batch (the original passed `da`, a single feature array).
    # NOTE(review): the generator yields (features, names); Keras needs
    # (features, labels) — convert the name list to training targets first.
    model.fit_generator(load_for_dataNname(),
                        steps_per_epoch=100,
                        epochs=10,
                        verbose=1)
def test():
    """Yield a single (1, 2, 3) tuple."""
    triple = (1, 2, 3)
    yield triple

d = test()
# The generator produces one tuple; print its elements one per line.
for tup in d:
    for value in tup:
        print(value)
def test():
    """Generator producing one tuple of three constants."""
    a, b, c = 1, 2, 3
    yield a, b, c

d = test()
# Print the whole yielded tuple (not its individual elements).
for item in d:
    print(item)