#!/usr/bin/env python3
#
# generate Hamiltonian Graphs and
# count the steps a depth-first search algorithm takes to verify each as a Hamiltonian graph
#
# to add backtracking just double the time and subtract the order
#TODO refactor
#TODO save to file, restart from file
#TODO multiprocess: pipeline merge / sequential union
import sys
import itertools
import math
import cProfile
import datetime
from multiprocessing import Pool
#from multiprocessing import Process
def gen_HG(size, order):
    """Generate all labeled Hamiltonian graphs with `size` edges on `order` vertices.

    Builds one Hamiltonian cycle per vertex permutation, then unions in every
    way of adding `size - order` chord edges.  Duplicates disappear because
    graphs are collected in a set.

    Returns a list of graphs; each graph is a tuple of (u, v) edge tuples
    with u < v, sorted lexicographically.
    """
    def adde(edges):
        'add every choose_k-combination of chords to the current cycle -> iterator of graphs'
        combos = itertools.combinations(edges, choose_k)
        return map(lambda x: tuple(sorted(cycle_2_line + list(x))), combos)
    def potential_edges(cycle):
        'list all chords (non-cycle edges), cycle in one-line notation -> list of tuples'
        chosen = []
        if choose_k == 0:  # cyclic graphs won't use cross links
            return chosen
        for i in range(order):
            # skip j == i+1 (a cycle edge); for i == 0 also skip j == order-1
            start, stop = (2, order - 1) if i == 0 else (i + 2, order)
            for j in range(start, stop):
                chosen.append(tuple(sorted([cycle[i], cycle[j]])))
        return chosen
    def two_line(cyc_1_line):
        'put cycle into graph (edge-list) notation -> sorted list of tuples'
        return sorted(tuple(sorted([cyc_1_line[i - 1], cyc_1_line[i]]))
                      for i in range(order))
    choose_k = size - order  # number of chords added on top of the cycle
    graphs = set()
    done = 0
    total = math.factorial(order)
    started = datetime.datetime.now()
    for cycle_1_line in itertools.permutations(range(order)):
        cycle_2_line = two_line(cycle_1_line)
        pe = potential_edges(cycle_1_line)
        graphs = graphs.union(adde(pe))  # this is resource intense
        done += 1
        elapsed = (datetime.datetime.now() - started).total_seconds()
        hours = int(elapsed / 3600)
        minutes = int((elapsed - hours * 3600) / 60)
        # BUG FIX: remaining time is elapsed * (work left / work done);
        # the old formula elapsed*(1 - done/total) underestimated badly
        eta_hours = (elapsed / 3600) * (total - done) / done
        print(' ', 100 * done / total, '% time spent', hours, ':', minutes,
              ' time left', eta_hours)
    print('### done generating HGs')
    return list(graphs)
# broke up gen_HG for m_gen_times() / multitasking parallelizm
def adde(edges, choose_k, cycle_2_line):
    'extend the base cycle with every way of choosing choose_k extra edges -> iterator of graphs'
    def merge(extra):
        return tuple(sorted(cycle_2_line + list(extra)))
    return map(merge, itertools.combinations(edges, choose_k))
def potential_edges(cycle, order, choose_k):
    'list all possible edges, cycle is in one line notation -> list of tuples'
    if choose_k == 0:  # cyclic graphs won't use cross links
        return []
    chords = []
    for i in range(order):
        # skip j == i+1 (cycle edge); for i == 0 also skip j == order-1
        lo, hi = (2, order - 1) if i == 0 else (i + 2, order)
        for j in range(lo, hi):
            chords.append(tuple(sorted((cycle[i], cycle[j]))))
    return chords
def two_line(cyc_1_line, order):
    'put cycle into a graph notation -> list of tuples'
    # pair each vertex with its predecessor; index -1 wraps to close the cycle
    pairs = (sorted((cyc_1_line[k - 1], cyc_1_line[k])) for k in range(order))
    return sorted(tuple(p) for p in pairs)
def v_some_merg(env):
    'gen some graphs (and partial merge); env = [permutations, (order, choose_k)]'
    perms = env[0]
    order, choose_k = env[1]
    graphs = set()
    for one_line in perms:
        base = two_line(one_line, order)
        chords = potential_edges(one_line, order, choose_k)
        graphs.update(adde(chords, choose_k, base))
    return graphs
def DF_search(graph):
    """Depth-first search for a Hamiltonian circuit, counting visited nodes.

    graph -- iterable of sorted (u, v) edge tuples with u < v.
    Returns [found, tm]: found is True when a Hamiltonian circuit exists,
    tm is the number of vertices expanded (backtracking is not counted;
    per the file header, backtracking cost = 2*tm - order).
    """
    # hoisted: was recomputed on every success check
    order = max(map(max, graph)) + 1  # vertices are 0..order-1
    def DFS(path, tm, found):
        'recursive depth-first search'
        def check_for_success(v):
            'separate check needed because the start vertex is already visited'
            # edges must be tuples!  v > 0 here, so the closing edge is (0, v)
            return len(path) == order and (0, v) in graph
        def adjacent(pair):
            'the neighbour of v across this edge'
            if v in pair:
                return list(set(pair) - set([v]))[0]
            return v  # placeholder, removed by the set difference below
        tm += 1  # count working vertex
        v = path[-1]  # the vertex we are working with
        if check_for_success(v):
            return [True, tm]
        avail = sorted(set(map(adjacent, graph)) - set(path))
        for nxt in avail:  # 'nxt' avoids shadowing builtin next()
            [found, tm] = DFS(path + [nxt], tm, False)
            if found:
                return [found, tm]
        # no more available -> backtrack
        return [found, tm]
    return DFS([0], 0, False)
def test_HG_by_size(sz):
    'run the DFS timing report for every order that can hold sz edges'
    def max_edges(n):
        'triangular number: edge capacity of a simple graph on n vertices'
        return ((n - 1) * (n - 1) + (n - 1)) / 2
    ordr = sz
    while True:
        if max_edges(ordr) < sz:
            break
        test_HG_by_size_and_order(sz, ordr)
        ordr -= 1
def test_HG_by_size_and_order(sz, ordr):
    'test all HG of a given size and order'
    gs = gen_HG(sz, ordr)
    print("> > > size", sz, " order", ordr)
    times = sorted(DF_search(g)[1] for g in gs)
    # histogram: number of graphs sharing each DFS step count
    while times:
        ct = times.count(times[0])
        print("#HG", ct, ' time', times[0])
        times = times[ct:]
def partition(num_cores, lst):
    """Deal lst round-robin into num_cores sublists for multiprocessing.

    Element i goes to sublist i % num_cores, preserving relative order.
    Uses positional slicing instead of the old lst.index() filter, which
    was O(n^2) and mis-binned duplicate elements (every copy landed in the
    bucket of the first occurrence).
    """
    return [lst[core::num_cores] for core in range(num_cores)]
def thread_state(llst, tpl):
    'pair every work chunk with the shared parameter tuple for multiprocessing'
    return [[chunk, tpl] for chunk in llst]
def m_gen_times(sz, ordr):
    """Parallel driver: generate labeled HGs of (sz, ordr), print each graph's
    DFS step count, then a histogram of those counts.

    Work is dealt round-robin over num_cores worker processes.
    """
    choose_k = sz - ordr
    num_cores = 4
    p = Pool(processes=num_cores)
    perms = list(itertools.permutations(list(range(ordr))))
    chunks = thread_state(partition(num_cores, perms), (ordr, choose_k))
    partial_sets = p.map(v_some_merg, chunks)
    # merge the per-worker sets; works for any num_cores
    # (was hard-coded to exactly 4 result sets)
    gs = list(set().union(*partial_sets))
    print("> > > size", sz, " order", ordr)
    times = list(map(lambda w: w[1], p.map(DF_search, gs)))
    p.close()  # release the worker processes
    p.join()
    for g, t in zip(gs, times):
        print('time', t, ' ', g)
    print(' ')
    # histogram: number of graphs sharing each DFS step count
    times.sort()
    while times:
        ct = times.count(times[0])
        print("#LHG", ct, ' time', times[0])
        times = times[ct:]
def relabel(g, label):
    'relabel a graph using a labeling (1-line notation)'
    return sorted(tuple(sorted((label[a], label[b]))) for a, b in g)
def test_rr(graph, gs, o):
    'is a rotation or reflection of the graph in the list of graphs?'
    def found_rotation(lst):
        'is some cyclic shift of the labeling lst a relabeling of graph found in gs?'
        for shift in lst:
            perm = list(lst[shift:]) + list(lst[:shift])
            if relabel(graph, perm) in gs:
                return True
        return False
    forward = list(range(o))
    if found_rotation(forward):
        return True
    forward.reverse()  # reflections: same rotations of the reversed labeling
    return found_rotation(forward)
def v_gen_1set_ul(env):
    'generate (a labeled G for each) unlabeled HG of a given order and size'
    edge_sizes = env[0]
    order, o_graphs = env[1]
    n_graphs = []
    # each edge length -> all unlabeled combinations
    for edgsz in edge_sizes:
        # each unlabeled graph of one less edge
        for g1 in o_graphs:
            # rotate the edge with respect to the graph
            for rot in range(order):
                extra = tuple(sorted([rot, (rot + edgsz) % order]))
                g2 = list(set(g1 + [extra]))
                # simple graphs only: the new edge must not already be present
                if len(g2) == len(g1) + 1:
                    g2.sort()
                    if not test_rr(g2, n_graphs, order):
                        n_graphs.append(g2)
    return set(map(lambda m: tuple(m), n_graphs))
def gen_ul(order, old_graphs=None):
    """Generate (a labeled representative for each) unlabeled HG of a given order.

    With old_graphs=None, iterates every size from order+1 up to the complete
    graph and returns a list of per-size graph lists; otherwise performs a
    single growth step from old_graphs (the size is implicit in old_graphs).
    """
    def triang(n):
        'triangular number: max edge count for a simple graph of order n'
        return ((n - 1) * (n - 1) + (n - 1)) / 2
    # initialize: the base Hamiltonian cycle in edge-list notation
    cycle = two_line(list(range(order)), order)
    # NOTE: the old unused Pool(processes=4) was removed -- it spawned four
    # worker processes that were never used and never closed
    edge_lengths = list(range(2, order // 2 + 1))
    if old_graphs is None:
        old_graphs = [cycle]
        size = order + 1
        # the code adds edges, so need to compensate cycles
        unlabeled_graphs = [old_graphs]
        while size <= triang(order):
            new_graphs = list(v_gen_1set_ul((edge_lengths, (order, old_graphs))))
            # BUG FIX: materialize to a list.  The old lazy map() object was
            # exhausted after the first edge-length pass inside v_gen_1set_ul,
            # silently dropping all later edge lengths on the next iteration.
            old_graphs = [list(m) for m in new_graphs]
            unlabeled_graphs.append(new_graphs)
            print('gen_ul > size', size, ' order', order, ' #', len(new_graphs), ' >', datetime.datetime.now().strftime("%I:%M%p") )
            size += 1
        # the number of HC for these unlabelled HG
        # (result unused; call kept for parity with the original -- TODO confirm
        # find_HCs has no needed side effects before removing)
        aa4 = list(map(find_HCs, new_graphs))
    else:
        unlabeled_graphs = list(v_gen_1set_ul((edge_lengths, (order, old_graphs))))
    return unlabeled_graphs
def m_gen_ul(order, old_graphs):
    """Multiprocess gen_ul: grow every graph in old_graphs by one edge using
    worker processes, then merge the per-worker results, deduplicating
    rotations/reflections with test_rr."""
    num_cores = 4
    pul = Pool(processes=num_cores)
    out1 = pul.map(v_gen_1set_ul, thread_state(partition(num_cores,
        list(range(2, order // 2 + 1))), (order, old_graphs)))
    pul.close()  # release the worker processes
    pul.join()
    # there is no standard form -> must merge with test_rr
    unlabeled_graphs = list(map(lambda c: list(c), out1[0]))
    # merge the remaining workers' sets (was hard-coded to exactly 4)
    rest = set().union(*out1[1:]) - out1[0]
    for g3 in map(list, rest):
        if not test_rr(g3, unlabeled_graphs, order):
            unlabeled_graphs.append(g3)
    print('m_gen_ul>>> #HG =', len(unlabeled_graphs))
    return unlabeled_graphs
def to_file(unlabeled_graphs, numHC):
    """Write the unlabeled HGs and their HC counts to a file H_<order>_<size>.

    Layout: a line assigning unHG, a line assigning hc_cnt, then a
    triple-quoted histogram block (HC count -> number of graphs).
    """
    # edges are sorted tuples, so e[1] is each edge's larger endpoint
    order = max(map(lambda e: e[1], unlabeled_graphs[0])) + 1
    name = 'H_' + str(order) + '_' + str(len(unlabeled_graphs[0]))
    with open(name, 'w') as fout:  # context manager: closed even on error
        fout.write('unHG = ' + str(list(map(lambda n: list(n), unlabeled_graphs))))
        fout.write('\nhc_cnt = ' + str(numHC))
        fout.write('\n\"\"\"\n')
        counts = list(numHC)  # work on a copy; leave the caller's list intact
        while len(counts) != 0:
            k = min(counts)
            fout.write('HCs ' + str(k) + ' #HG ' + str(counts.count(k)) + '\n')
            counts = list(filter(lambda h: h != k, counts))
        fout.write('\"\"\"\n')
def v_cnt_HC(graphs):
    'vectorized count HCs: one Hamiltonian-circuit count per graph'
    return [len(find_HCs(g)) for g in graphs]
def m_cnt_HC(graphs):
    """Count Hamiltonian circuits per graph across worker processes.

    multiprocessing needs to be called from top level.  partition() deals
    graphs round-robin, so chunk j holds graphs[j::num_cores]; the counts are
    re-interleaved column-wise to restore the original graph order.
    """
    num_cores = 4
    pul = Pool(processes=num_cores)
    numHC = pul.map(v_cnt_HC, partition(num_cores, graphs))
    pul.close()  # release the worker processes
    pul.join()
    # element i of chunk j corresponds to graphs[i * num_cores + j];
    # chunk lengths are non-increasing, so the guard just trims the tail
    return [chunk[i]
            for i in range(len(numHC[0]))
            for chunk in numHC
            if i < len(chunk)]
def num_HC_edges(graph):
    """Assuming the consecutive cycle 0-1-...-(order-1)-0 is a subgraph, test
    each chord (non-cycle edge): drop the chord, merge its endpoints
    (relabel e[1] to e[0], shifting higher labels down), and check whether a
    Hamiltonian circuit survives.

    Returns (chords surviving the merge test, total chord count).
    """
    order = max(map(lambda w: w[1], graph)) + 1
    cycle = two_line(list(range(order)), order)
    chords = list(set(graph) - set(cycle))
    HC_edges = 0
    for e in chords:
        # labeling that maps e[1] -> e[0] and closes the gap above e[1]
        # (the original built this by mutating l1 while iterating over it,
        # which only worked because each step touched its own index)
        merge = [e[0] if i == e[1] else (i - 1 if i > e[1] else i)
                 for i in range(order)]
        g2 = relabel(list(set(graph) - set([e])), merge)
        if DF_search(g2)[0]:
            HC_edges += 1
    return (HC_edges, len(chords))
#--------------------------------------------------
def DFS_path(graph):
    """Depth-first search that returns the Hamiltonian circuit's vertex order
    plus the count of nodes tried (backtracking is not counted).

    Returns [path, tm]: path lists the circuit's vertices starting at 0 when
    one is found; otherwise the search unwinds and the top level returns [].
    """
    # hoisted: was recomputed on every success check
    order = max(map(max, graph)) + 1  # vertices are 0..order-1
    def DFS(path, tm):
        'recursive depth-first search'
        def check_for_success(v):
            'separate check needed because the start vertex is already visited'
            # edges must be tuples!  v > 0 here, so the closing edge is (0, v)
            return len(path) == order and (0, v) in graph
        def adjacent(pair):
            'the neighbour of v across this edge'
            if v in pair:
                return list(set(pair) - set([v]))[0]
            return v  # placeholder, removed by the set difference below
        tm += 1  # count working vertex
        v = path[-1]  # the vertex we are working with
        if check_for_success(v):
            return [path, tm]
        avail = sorted(set(map(adjacent, graph)) - set(path))
        for nxt in avail:  # 'nxt' avoids shadowing builtin next()
            [path, tm] = DFS(path + [nxt], tm)
            if check_for_success(path[-1]):
                return [path, tm]
        # no more available -> backtrack (drop the vertex just tried)
        return [path[:-1], tm]
    return DFS([0], 0)
def find_HCs(graph):
'try to find all HC'
def rotate(lst3, start):
'rotate a list to start'
i3 = lst3.index(start)
return lst3[i3:] + lst3[:i3]
def gen_labels(order, graph):
'generate labels - symmetric group minus dihedral group '
ord_1 = order - 1
return map(lambda v: v + (ord_1,),
filter(lambda u: u[0]\n" %\
(math.sin(one_segment * edg[0]) * self.radius + self.center[0],
math.cos(one_segment * edg[0]) * self.radius + self.center[1],
math.sin(one_segment * edg[1]) * self.radius + self.center[0],
math.cos(one_segment * edg[1]) * self.radius + self.center[1])
lst1.append(str1)
## add labels
#for i in list(range(max(map(lambda w3: w3[1], self.graph))+1)):
# lst1.append(" \n %s\n \n" %\
# (math.sin(one_segment * i) * self.radius + self.center[0] -2,
# math.cos(one_segment * i) * self.radius + self.center[1] +2,
# 14, str(i)) )
return lst1
def graph_diagram(g):
    '''Render graph g as an SVG via the Scene/HG drawing helpers and display it.

    Side effects only: writes an SVG file and opens a viewer.
    NOTE(review): Scene and HG are defined in a part of this file that is
    garbled/not visible here -- behavior inferred from the calls below; confirm.
    '''
    scene = Scene('aaaa',100,100)
    scene.add(HG((0,0),100,g))
    scene.write_svg()
    scene.display()
# use a CLI
if __name__ == '__main__':
    # usage text, shared by the unknown-command and no-argument branches
    # (it was duplicated verbatim in both)
    usage = """
     generate Hamiltonian Graphs and list Depth First Search timings
     usage: gen <# edges / size> <# verticies / order>
            unl <# edges / size>
     or
            unf """
    if len(sys.argv) > 1:
        if sys.argv[1] == 'gen':
            m_gen_times(int(sys.argv[2]), int(sys.argv[3]))
        elif sys.argv[1] == 'unl':
            aa1 = gen_ul(int(sys.argv[2]))
        elif sys.argv[1] == 'unf':
            # restart from a saved file whose first line assigns unHG
            # SECURITY NOTE: exec() runs arbitrary code from the named file --
            # only pass files produced by to_file() or otherwise trusted
            with open(sys.argv[2]) as fin:
                exec(fin.readline())
            ordr = max(list(map(lambda p: p[1], unHG[0]))) + 1
            print('starting unlabeled HG of order', ordr, 'size', len(unHG[0]), ' #HG =', len(unHG))
            unG = m_gen_ul(ordr, unHG)
            nHC = m_cnt_HC(unG)
            to_file(unG, nHC)
        else:
            print(usage)
    else:
        print(usage)