本文整理汇总了Python中graphviz.Graph.save方法的典型用法代码示例。如果您正苦于以下问题:Python Graph.save方法的具体用法?Python Graph.save怎么用?Python Graph.save使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类graphviz.Graph
的用法示例。
在下文中一共展示了Graph.save方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: generateGraph
# 需要导入模块: from graphviz import Graph [as 别名]
# 或者: from graphviz.Graph import save [as 别名]
def generateGraph():
    """Render the parsed Btrfs inode tree to Graphviz files.

    Consumes the module-level ``inode`` list destructively (its head is
    popped), reads the output directory and base name from the Tk entry
    widgets ``enterPath`` / ``enterFilename``, saves the ``.gv.dot`` source,
    and renders one output file per format listed in the module-level
    ``types``.
    """
    G = Graph(
        engine='dot',
        filename='Btrfs-Graph.dot',
        name='BRTFS-Browser',
        comment='https://github.com/Zo0MER/BRTFS-Browser.git',
        graph_attr={
            'rankdir': 'RL',
            'charset': 'utf-8',
            'bgcolor': '#eeeeee',
            'labelloc': 't',
            'splines': 'compound',
            'nodesep': '0.7',
            'ranksep': '5',
        },
        node_attr={
            'fontsize': '18.0',
            'shape': 'box',
        },
    )
    # Title node carrying a hyperlink to the project page on GitHub.
    G.node(
        'meta',
        label='Btrfs-debug-tree \nhttps://github.com/Zo0MER/BRTFS-Browser.git',
        href='https://github.com/Zo0MER/BRTFS-Browser.git',
        fontcolor='#4d2600',
        fontsize='30.0',
    )
    # Robustness fix: the original unconditionally indexed inode[0] and
    # raised IndexError when nothing had been parsed yet.
    if not inode:
        return
    # Bug fix: the original did ``inode.remove(inode[0])``, which removes by
    # *value* (wrong if an equal element appears earlier, and O(n)); popping
    # by position is what was intended.
    first = inode.pop(0)
    if inode:
        # Link the first item (ROOT_TREE_DIR INODE_ITEM, INODE_REF) with
        # every remaining INODE_ITEM / EXTEND_DATA entry.
        for pair in inode:
            G.edge(''.join([str(x) for x in first]),
                   ''.join([str(x) for x in pair]))
    else:
        # Bug fix: the original passed the raw tuple/list to G.node();
        # stringify it the same way the edge endpoints are built.
        G.node(''.join([str(x) for x in first]))
    # Save the *.dot source, then render every requested output format.
    pathout = enterPath.get()
    filenameout = enterFilename.get()
    # Bug fix: the original appended '.gv.dot' both here and again when
    # assigning G.filename, yielding names like 'name.gv.dot.gv.dot'.
    if not filenameout:
        filenameout = "btrfs-graph"
    G.filename = filenameout + '.gv.dot'
    G.directory = pathout
    G.save()
    for t in types:
        G.format = t
        G.render()
示例2: main
# 需要导入模块: from graphviz import Graph [as 别名]
# 或者: from graphviz.Graph import save [as 别名]
def main(restricted = False, filename = r'C:\Users\Gabe\Miniconda3\MyScripts\test-crawler15'):
    """Interactively crawl a web site and render its link graph as a PNG.

    Prompts for a start URL and a maximum depth, crawls via the sibling
    ``crawlPage`` helper, then renders the collected edges with Graphviz's
    sfdp engine, viewing the result when done.

    :param restricted: passed through to ``crawlPage``; presumably limits
        the crawl to the start site's domain — confirm against crawlPage.
    :param filename: output path base (no extension).  Defaults to the
        original hard-coded location for backward compatibility; pass your
        own path to use this outside the author's machine.
    """
    site = input("What site to crawl?")
    maxDepth = int(input("Max depth?"))
    links = set()
    pages = set()
    dot = Graph(comment = site, format="png", engine="sfdp")
    # Bug fix: the original did ``dot.overlap = "true"``, which only sets a
    # plain Python attribute on the Graph object and never reaches the DOT
    # source; the effective overlap setting is injected via Source below.
    try:
        # Fetch the start page once to learn its title for node labelling.
        soup = BeautifulSoup(urllib2.urlopen(site), "html.parser")
        pageTitle = soup.title.string
        pages.add(pageTitle)
        titles[site] = pageTitle
        soup.decompose()
    except Exception as e:
        # Best effort: fall back to using the URL itself as the title.
        pageTitle = site
        print("Error: {0}".format(e))
    # Take the middle host-name component (e.g. 'example' from
    # 'www.example.com') so same-site links can be recognized.
    siteBase = ""
    try:
        pos1 = site.find(".")
        pos2 = site.find(".", pos1 + 1)
        siteBase = site[pos1 + 1:pos2]
    except Exception as e:
        print("Error: {0}".format(e))
    print(siteBase)
    crawlPage(site, pageTitle, maxDepth, pages, links, restricted, siteBase)
    for l in links:
        try:
            dot.edge(l[0], l[1])
        except Exception as e:
            print("Error: {0}".format(e))
    # Splice 'graph [overlap = prism]' in right after the opening brace so
    # sfdp resolves node overlaps; rebuilding through Source keeps the
    # chosen format and engine.
    loc = str(dot).find("{") + 1
    dot = Source(str(dot)[0:loc] + "\n\tgraph [overlap = prism]\n" + str(dot)[loc:],
                 format="png", engine="sfdp")
    dot.save()
    try:
        # Drop any stale extension-less artifact from a previous run.
        os.remove(filename)
    except Exception as e:
        print("Error: {0}".format(e))
    try:
        # Context manager guarantees the handle is closed even on error.
        with open(filename + ".txt", "w") as outFile:
            outFile.write(str(dot))
    except Exception as e:
        print("Error: {0}".format(e))
    dot.render(filename, view=True)