
1. Introduction

GNN (Graph Neural Network) and GCN (Graph Convolutional Network) are both neural network models built on graph structure. The goal of this post is to strengthen coding fundamentals: without using PyG, we dig into how these two basic Graph Net algorithms work. Straight to the code.
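For orientation, both implementations below share essentially the same per-layer propagation rule, written here to match the code (which row-normalizes the adjacency, rather than using the symmetric normalization of the original GCN paper):

    H^{(l+1)} = \sigma\left( \hat{A}\, H^{(l)} W^{(l)} + b^{(l)} \right), \qquad \hat{A} = D^{-1}(A + I)

where A is the adjacency matrix, I adds self-loops, D is the degree matrix, \sigma is ReLU, and a log-softmax is applied after the final layer.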

2. Code

import time
import random
import os
import numpy as np
import math
from torch.nn.parameter import Parameter
from torch.nn.modules.module import Module
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import scipy.sparse as sp


# Configuration
class configs():
    def __init__(self):
        # Data
        self.data_path = r'E:\code\Graph\data'
        self.save_model_dir = r'\code\Graph'
        self.model_name = r'GCN'  # GNN / GCN
        self.seed = 2023
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.batch_size = 64
        self.epoch = 200
        self.in_features = 1433      # Cora node feature dimension: 1433
        self.hidden_features = 16    # number of hidden units
        self.output_features = 8     # output classes (the post uses 8; Cora has 7 paper classes, so one unit goes unused)
        self.learning_rate = 0.01
        self.dropout = 0.5
        self.istrain = True
        self.istest = True

cfg = configs()


def seed_everything(seed=2023):
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)

seed_everything(seed=cfg.seed)


# Data
class Graph_Data_Loader():
    def __init__(self):
        self.adj, self.features, self.labels, self.idx_train, self.idx_val, self.idx_test = self.load_data()
        self.adj = self.adj.to(cfg.device)
        self.features = self.features.to(cfg.device)
        self.labels = self.labels.to(cfg.device)
        self.idx_train = self.idx_train.to(cfg.device)
        self.idx_val = self.idx_val.to(cfg.device)
        self.idx_test = self.idx_test.to(cfg.device)

    def load_data(self, path=cfg.data_path, dataset="cora"):
        """Load citation network dataset (cora only for now)"""
        print('Loading {} dataset...'.format(dataset))
        idx_features_labels = np.genfromtxt(os.path.join(path, dataset, dataset + '.content'),
                                            dtype=np.dtype(str))
        features = sp.csr_matrix(idx_features_labels[:, 1:-1], dtype=np.float32)
        labels = self.encode_onehot(idx_features_labels[:, -1])

        # build graph
        idx = np.array(idx_features_labels[:, 0], dtype=np.int32)
        idx_map = {j: i for i, j in enumerate(idx)}
        edges_unordered = np.genfromtxt(os.path.join(path, dataset, dataset + '.cites'),
                                        dtype=np.int32)
        edges = np.array(list(map(idx_map.get, edges_unordered.flatten())),
                         dtype=np.int32).reshape(edges_unordered.shape)
        adj = sp.coo_matrix((np.ones(edges.shape[0]), (edges[:, 0], edges[:, 1])),
                            shape=(labels.shape[0], labels.shape[0]),
                            dtype=np.float32)

        # build symmetric adjacency matrix
        adj = adj + adj.T.multiply(adj.T > adj) - adj.multiply(adj.T > adj)

        features = self.normalize(features)
        adj = self.normalize(adj + sp.eye(adj.shape[0]))

        idx_train = range(140)
        idx_val = range(200, 500)
        idx_test = range(500, 1500)

        features = torch.FloatTensor(np.array(features.todense()))
        labels = torch.LongTensor(np.where(labels)[1])
        adj = self.sparse_mx_to_torch_sparse_tensor(adj)

        idx_train = torch.LongTensor(idx_train)
        idx_val = torch.LongTensor(idx_val)
        idx_test = torch.LongTensor(idx_test)

        return adj, features, labels, idx_train, idx_val, idx_test

    def encode_onehot(self, labels):
        classes = set(labels)
        classes_dict = {c: np.identity(len(classes))[i, :] for i, c in enumerate(classes)}
        labels_onehot = np.array(list(map(classes_dict.get, labels)), dtype=np.int32)
        return labels_onehot

    def normalize(self, mx):
        """Row-normalize sparse matrix"""
        rowsum = np.array(mx.sum(1))
        r_inv = np.power(rowsum, -1).flatten()
        r_inv[np.isinf(r_inv)] = 0.
        r_mat_inv = sp.diags(r_inv)
        mx = r_mat_inv.dot(mx)
        return mx

    def sparse_mx_to_torch_sparse_tensor(self, sparse_mx):
        """Convert a scipy sparse matrix to a torch sparse tensor."""
        sparse_mx = sparse_mx.tocoo().astype(np.float32)
        indices = torch.from_numpy(
            np.vstack((sparse_mx.row, sparse_mx.col)).astype(np.int64))
        values = torch.from_numpy(sparse_mx.data)
        shape = torch.Size(sparse_mx.shape)
        return torch.sparse.FloatTensor(indices, values, shape)


# Accuracy metric
def accuracy(output, labels):
    preds = output.max(1)[1].type_as(labels)
    correct = preds.eq(labels).double()
    correct = correct.sum()
    return correct / len(labels)


# Models
#01-GNN
class GNNLayer(nn.Module):
    def __init__(self, in_features, output_features):
        super(GNNLayer, self).__init__()
        self.linear = nn.Linear(in_features, output_features)

    def forward(self, adj_matrix, features):
        hidden_features = torch.matmul(adj_matrix, features)  # GNN update: H' = A * H
        hidden_features = self.linear(hidden_features)         # linear transform
        hidden_features = F.relu(hidden_features)              # ReLU activation
        return hidden_features
class GNN(nn.Module):
    def __init__(self, in_features, hidden_features, output_features, num_layers=2):
        super(GNN, self).__init__()
        # in_features: input dim, hidden_features: hidden dim,
        # output_features: output dim, num_layers: number of GNN layers
        self.layers = nn.ModuleList(
            [GNNLayer(in_features, hidden_features) if i == 0 else GNNLayer(hidden_features, hidden_features)
             for i in range(num_layers)])
        self.output_layer = nn.Linear(hidden_features, output_features)

    def forward(self, adj_matrix, features):
        hidden_features = features
        for layer in self.layers:
            hidden_features = layer(adj_matrix, hidden_features)
        output = self.output_layer(hidden_features)
        return F.log_softmax(output, dim=1)


# 02-GCN
class GraphConvolution(Module):
    """Simple GCN layer, similar to https://arxiv.org/abs/1609.02907"""
    def __init__(self, in_features, out_features, bias=True):
        super(GraphConvolution, self).__init__()
        self.in_features = in_features
        self.out_features = out_features
        self.weight = Parameter(torch.FloatTensor(in_features, out_features))
        if bias:
            self.bias = Parameter(torch.FloatTensor(out_features))
        else:
            self.register_parameter('bias', None)
        self.reset_parameters()

    def reset_parameters(self):
        stdv = 1. / math.sqrt(self.weight.size(1))
        self.weight.data.uniform_(-stdv, stdv)
        if self.bias is not None:
            self.bias.data.uniform_(-stdv, stdv)

    def forward(self, input, adj):
        support = torch.mm(input, self.weight)
        output = torch.spmm(adj, support)
        if self.bias is not None:
            return output + self.bias
        else:
            return output

    def __repr__(self):
        return self.__class__.__name__ + ' (' \
               + str(self.in_features) + ' -> ' \
               + str(self.out_features) + ')'


class GCN(nn.Module):
    def __init__(self, in_features, hidden_features, output_features, dropout=cfg.dropout):
        super(GCN, self).__init__()
        self.gc1 = GraphConvolution(in_features, hidden_features)
        self.gc2 = GraphConvolution(hidden_features, output_features)
        self.dropout = dropout

    def forward(self, adj_matrix, features):
        x = F.relu(self.gc1(features, adj_matrix))
        x = F.dropout(x, self.dropout, training=self.training)
        x = self.gc2(x, adj_matrix)
        return F.log_softmax(x, dim=1)


class graph_run():
    def train(self):
        t = time.time()
        # Create train processing
        all_data = Graph_Data_Loader()
        # Build the model
        model = eval(cfg.model_name)(in_features=cfg.in_features,
                                     hidden_features=cfg.hidden_features,
                                     output_features=cfg.output_features).to(cfg.device)
        optimizer = optim.Adam(model.parameters(), lr=cfg.learning_rate, weight_decay=5e-4)
        # Train
        model.train()
        for epoch in range(cfg.epoch):
            optimizer.zero_grad()
            output = model(all_data.adj, all_data.features)
            loss_train = F.nll_loss(output[all_data.idx_train], all_data.labels[all_data.idx_train])
            acc_train = accuracy(output[all_data.idx_train], all_data.labels[all_data.idx_train])
            loss_train.backward()
            optimizer.step()
            loss_val = F.nll_loss(output[all_data.idx_val], all_data.labels[all_data.idx_val])
            acc_val = accuracy(output[all_data.idx_val], all_data.labels[all_data.idx_val])
            print('Epoch: {:04d}'.format(epoch + 1),
                  'loss_train: {:.4f}'.format(loss_train.item()),
                  'acc_train: {:.4f}'.format(acc_train.item()),
                  'loss_val: {:.4f}'.format(loss_val.item()),
                  'acc_val: {:.4f}'.format(acc_val.item()),
                  'time: {:.4f}s'.format(time.time() - t))
        torch.save(model, os.path.join(cfg.save_model_dir, 'latest.pth'))  # save the model

    def infer(self):
        # Create test processing
        all_data = Graph_Data_Loader()
        model_path = os.path.join(cfg.save_model_dir, 'latest.pth')
        model = torch.load(model_path, map_location=torch.device(cfg.device))
        model.eval()
        output = model(all_data.adj, all_data.features)
        loss_test = F.nll_loss(output[all_data.idx_test], all_data.labels[all_data.idx_test])
        acc_test = accuracy(output[all_data.idx_test], all_data.labels[all_data.idx_test])
        print("Test set results:",
              "loss= {:.4f}".format(loss_test.item()),
              "accuracy= {:.4f}".format(acc_test.item()))


if __name__ == '__main__':
    mygraph = graph_run()
    if cfg.istrain == True:
        mygraph.train()
    if cfg.istest == True:
        mygraph.infer()
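One detail worth noting: normalize(adj + sp.eye(adj.shape[0])) above row-normalizes the adjacency, i.e. D^{-1}(A + I), whereas the original GCN paper (Kipf & Welling, 2017) uses the symmetric form D^{-1/2}(A + I)D^{-1/2}. A minimal sketch of the symmetric variant, in case you want to compare the two (it is not part of the code above; in load_data() it would replace the adj = self.normalize(adj + sp.eye(adj.shape[0])) line):

import numpy as np
import scipy.sparse as sp

def normalize_adj_symmetric(adj):
    """Symmetric normalization D^{-1/2} (A + I) D^{-1/2} (Kipf & Welling, 2017).

    Alternative to the row normalization used in load_data() above; `adj` is a
    scipy sparse adjacency matrix without self-loops.
    """
    adj = adj + sp.eye(adj.shape[0])            # add self-loops: A + I
    rowsum = np.array(adj.sum(1)).flatten()     # node degrees (including self-loop)
    d_inv_sqrt = np.power(rowsum, -0.5)
    d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.       # guard against isolated nodes
    d_mat_inv_sqrt = sp.diags(d_inv_sqrt)
    return d_mat_inv_sqrt.dot(adj).dot(d_mat_inv_sqrt).tocoo()

Usage in load_data() would simply be adj = normalize_adj_symmetric(adj), followed by the same sparse_mx_to_torch_sparse_tensor conversion.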

3. Results and Discussion

The Cora dataset needs to be downloaded separately; the expected file layout (the one load_data() reads) is sketched below.
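The original folder screenshot is not reproduced here; the layout below is derived from the load_data() paths, and check_cora_layout is just an illustrative helper, not part of the code above:

import os

# Expected layout under cfg.data_path (E:\code\Graph\data in the config above):
#
#   data/
#   └── cora/
#       ├── cora.content   # one line per paper: <paper_id> <1433 binary word features> <class_label>
#       └── cora.cites     # one line per citation edge: <cited_paper_id> <citing_paper_id>

def check_cora_layout(path, dataset="cora"):
    """Print whether the two files load_data() reads are present."""
    for suffix in (".content", ".cites"):
        f = os.path.join(path, dataset, dataset + suffix)
        print(f, "-> found" if os.path.isfile(f) else "-> MISSING")

# check_cora_layout(cfg.data_path)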

I also measured the Params and GFLOPs, and they are fairly large; if either model were used as a block inside a bigger net, it would still need some optimization, ha. (A minimal counting sketch follows the tables.)

Model | Params  | GFLOPs
GNN   | 23.352K | 126.258M

Model | Cora accuracy (train / val / test)
GNN   | 1.0000 / 0.7800 / 0.7620
GCN   | 0.9714 / 0.7767 / 0.8290
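For reference, a minimal way to reproduce such counts, assuming the GNN/GCN classes and cfg from the code above are in scope (parameter counting is plain PyTorch; the commented-out FLOPs estimate uses the third-party thop package as one possible choice):

import torch

def count_parameters(model):
    """Number of trainable parameters."""
    return sum(p.numel() for p in model.parameters() if p.requires_grad)

model = GNN(in_features=cfg.in_features,
            hidden_features=cfg.hidden_features,
            output_features=cfg.output_features)
print("Params:", count_parameters(model))

# Optional FLOPs/MACs estimate (pip install thop), with adj/features from Graph_Data_Loader:
# from thop import profile
# macs, params = profile(model, inputs=(all_data.adj, all_data.features))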

4. Outlook

Going forward it is worth switching to PyG (PyTorch Geometric): implementing graph networks such as GAT, and organizing and loading graph data, is much more convenient there (a minimal sketch follows). Graph Nets can also serve as an embedding scheme for attribute data, feeding the attributes into a network as an extra, complementary feature during training, to see whether that improves performance.
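A minimal PyG sketch of the same Cora + two-layer GCN setup (not used in this post; class and argument names follow torch_geometric's documented API):

import torch
import torch.nn.functional as F
from torch_geometric.datasets import Planetoid
from torch_geometric.nn import GCNConv

dataset = Planetoid(root='./data/Planetoid', name='Cora')  # downloads Cora on first use
data = dataset[0]                                          # the single Cora graph

class PyGGCN(torch.nn.Module):
    def __init__(self, hidden=16):
        super().__init__()
        self.conv1 = GCNConv(dataset.num_node_features, hidden)
        self.conv2 = GCNConv(hidden, dataset.num_classes)

    def forward(self, x, edge_index):
        x = F.relu(self.conv1(x, edge_index))
        x = F.dropout(x, p=0.5, training=self.training)
        return F.log_softmax(self.conv2(x, edge_index), dim=1)

model = PyGGCN()
out = model(data.x, data.edge_index)
loss = F.nll_loss(out[data.train_mask], data.y[data.train_mask])

Note how the dataset download, feature matrix, edge index, and train/val/test masks all come from Planetoid, replacing the hand-written Graph_Data_Loader above.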

