This article collects typical usage examples of Python's itertools.combinations. If you have been wondering exactly what itertools.combinations does, or how to use it in your own code, the curated examples below should help. You can also explore further usage examples from the itertools module that this function belongs to.
The following 15 code examples of itertools.combinations are sorted by popularity by default. Feel free to upvote the examples you like or find useful; your votes help the system recommend better Python code samples.
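Before the examples, here is a minimal, self-contained sketch of what itertools.combinations itself does: it yields every unordered r-length selection of the input elements, in the order they appear in the input, without repeating elements.
import itertools

# All unordered pairs from a small list; output order follows the input sequence.
pairs = list(itertools.combinations(['a', 'b', 'c', 'd'], 2))
print(pairs)
# [('a', 'b'), ('a', 'c'), ('a', 'd'), ('b', 'c'), ('b', 'd'), ('c', 'd')]

# The number of results is "n choose r"; no subset is emitted twice in different orders.
print(len(list(itertools.combinations(range(5), 3))))   # 10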
Example 1: init_W
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def init_W(self, mode='normal'):
    self.W = {}
    if (self.status != 'load_train_data') and (self.status != 'train'):
        print("Please load train data first.")
        return self.W
    self.status = 'init'
    self.data_num = len(self.train_Y)
    self.data_demension = len(self.train_X[0])
    self.class_list = list(itertools.combinations(np.unique(self.train_Y), 2))
    for class_item in self.class_list:
        self.W[class_item] = np.zeros(self.data_demension)
    return self.W
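The example pairs every distinct class label with every other one, which is the usual setup for one-vs-one multiclass training. A standalone sketch of just that pairing step; the label values and dimension below are made up for illustration:
import itertools
import numpy as np

# Hypothetical label vector; np.unique returns the distinct classes in sorted order.
train_Y = np.array([0, 2, 1, 0, 2, 1])
class_pairs = list(itertools.combinations(np.unique(train_Y), 2))
print(class_pairs)   # [(0, 1), (0, 2), (1, 2)]

# One weight vector per class pair, mirroring init_W above.
data_dimension = 4
W = {pair: np.zeros(data_dimension) for pair in class_pairs}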
Example 2: params
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def params(self) -> Iterable[sympy.Symbol]:
    """The parameters of the ansatz."""
    for i in range(self.iterations):
        for p in range(len(self.qubits)):
            if (self.include_all_z or not
                    numpy.isclose(self.hamiltonian.one_body[p, p], 0)):
                yield LetterWithSubscripts('U', p, i)
        for p, q in itertools.combinations(range(len(self.qubits)), 2):
            if (self.include_all_xxyy or not
                    numpy.isclose(self.hamiltonian.one_body[p, q].real, 0)):
                yield LetterWithSubscripts('T', p, q, i)
            if (self.include_all_yxxy or not
                    numpy.isclose(self.hamiltonian.one_body[p, q].imag, 0)):
                yield LetterWithSubscripts('W', p, q, i)
            if (self.include_all_cz or not
                    numpy.isclose(self.hamiltonian.two_body[p, q], 0)):
                yield LetterWithSubscripts('V', p, q, i)
Example 3: from_words
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def from_words(cls, words, window_size=3):
    """Construct a TrigramCollocationFinder for all trigrams in the given
    sequence.
    """
    if window_size < 3:
        raise ValueError("Specify window_size at least 3")

    wfd = FreqDist()
    wildfd = FreqDist()
    bfd = FreqDist()
    tfd = FreqDist()
    for window in ngrams(words, window_size, pad_right=True):
        w1 = window[0]
        if w1 is None:
            continue
        for w2, w3 in _itertools.combinations(window[1:], 2):
            wfd[w1] += 1
            if w2 is None:
                continue
            bfd[(w1, w2)] += 1
            if w3 is None:
                continue
            wildfd[(w1, w3)] += 1
            tfd[(w1, w2, w3)] += 1
    return cls(wfd, bfd, wildfd, tfd)
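This constructor belongs to NLTK's TrigramCollocationFinder. A rough usage sketch, assuming a recent NLTK release with the usual collocation API:
from nltk.collocations import TrigramCollocationFinder, TrigramAssocMeasures

tokens = "the quick brown fox jumps over the lazy dog".split()
finder = TrigramCollocationFinder.from_words(tokens, window_size=3)

# Rank trigrams by pointwise mutual information and keep the top 3.
trigram_measures = TrigramAssocMeasures()
print(finder.nbest(trigram_measures.pmi, 3))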
Example 4: canInd
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def canInd(P, ni):
    l = len(P)
    ind = range(l)
    if ni < 2:
        return [[xx] for xx in ind]
    if ni >= l:
        return [ind]
    im = intersection_matrix(P)
    can = []
    for w in combinations(ind, ni):
        fg = True
        for i in w:
            for j in w:
                if im[i, j] == 0:
                    fg = False
                    break
            if not fg:
                break
        if fg:
            can.append(list(w))
    return can
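The nested loops above simply check that every pair of chosen indices is compatible according to the intersection matrix. A self-contained sketch of the same idea, using a made-up symmetric compatibility matrix and itertools.combinations for the pairwise test as well:
import itertools
import numpy as np

# Hypothetical symmetric compatibility matrix: im[i, j] == 1 means i and j are compatible.
im = np.array([[1, 1, 0],
               [1, 1, 1],
               [0, 1, 1]])

def compatible_subsets(n_items, size):
    # Keep only index subsets whose members are pairwise compatible.
    return [list(w) for w in itertools.combinations(range(n_items), size)
            if all(im[i, j] != 0 for i, j in itertools.combinations(w, 2))]

print(compatible_subsets(3, 2))   # [[0, 1], [1, 2]]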
Example 5: get_ngrams
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def get_ngrams(sentence, max_length, skip_ngrams=False, add_tags=True):
    # Return ngrams of length up to max_length as found in sentence.
    out = []
    words = sentence.split()
    if add_tags:
        words = ["<s>"] + words + ["</s>"]
    if not skip_ngrams:
        for i in range(len(words)):
            for n in range(1, min(max_length + 1, len(words) - i + 1)):
                this_ngram = " ".join(words[i:i + n])
                out.append((this_ngram, []))
    else:
        for n in range(1, max_length + 1):
            subsets = set(itertools.combinations(range(len(words)), n))
            for subset in subsets:
                subset = sorted(subset)
                dists = [(subset[i] - subset[i - 1]) for i in range(1, len(subset))]
                out.append((" ".join([words[j] for j in subset]), dists))
    return out
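Since get_ngrams depends only on itertools, it can be tried directly (assuming itertools has been imported as noted above). The output in the comments is derived from the logic of the function, so treat it as illustrative:
# Contiguous ngrams up to length 2, without sentence tags.
print(get_ngrams("the cat sat", 2, add_tags=False))
# [('the', []), ('the cat', []), ('cat', []), ('cat sat', []), ('sat', [])]

# With skip_ngrams=True, non-contiguous word subsets are also emitted, each
# paired with the list of gaps between the chosen word positions.
skip = get_ngrams("the cat sat", 2, skip_ngrams=True, add_tags=False)
print(sorted(skip))   # includes ('the sat', [2]) alongside the contiguous bigrams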
Example 6: connected_combos
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def connected_combos(self, possible_nodes, size):
    """
    Computes the number of different connected subsets of `possible_nodes`
    containing `size` nodes.

    Parameters
    ----------
    possible_nodes : list
        A list of node (qubit) labels.

    size : int
        The size of the connected subsets being sought (counted).

    Returns
    -------
    int
    """
    count = 0
    for selected_nodes in _itertools.combinations(possible_nodes, size):
        if self.are_glob_connected(selected_nodes): count += 1
    return count
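This method is an instance of a common counting pattern: enumerate every size-k subset with itertools.combinations and count the ones that pass a predicate. A standalone sketch with a toy connectivity check over a made-up graph:
import itertools

# Hypothetical undirected graph: 0-1-2 form a path, 3 is isolated.
edges = {(0, 1), (1, 2)}

def is_connected(nodes):
    # Simple graph search restricted to `nodes`.
    nodes = set(nodes)
    if not nodes:
        return False
    start = next(iter(nodes))
    seen, frontier = {start}, [start]
    while frontier:
        v = frontier.pop()
        for a, b in edges:
            for u, w in ((a, b), (b, a)):
                if u == v and w in nodes and w not in seen:
                    seen.add(w)
                    frontier.append(w)
    return seen == nodes

def connected_combos(possible_nodes, size):
    return sum(1 for combo in itertools.combinations(possible_nodes, size)
               if is_connected(combo))

print(connected_combos([0, 1, 2, 3], 2))   # 2: {0, 1} and {1, 2}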
Example 7: get_all_connected_sets
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def get_all_connected_sets(self, n):
    """
    Returns all connected sets of `n` qubits. Note that for a large device
    this will often be an unreasonably large number of sets of qubits, and
    so the run-time of this method will be unreasonable.

    Parameters
    ----------
    n : int
        The number of qubits within each set.

    Returns
    -------
    list
        All sets of `n` connected qubits.
    """
    connectedqubits = []
    for combo in _iter.combinations(self.qubit_labels, n):
        if self.qubitgraph.subgraph(list(combo)).are_glob_connected(combo):
            connectedqubits.append(combo)
    return connectedqubits
#Note: Below method gets all subgraphs up to full graph size.
Example 8: find_all_sets_of_compatible_twoQgates
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def find_all_sets_of_compatible_twoQgates(edgelist, n, gatename='Gcnot', aslabel=False):
    """
    todo.

    n : int . the number of two-qubit gates to have in the set.
    """
    co2Qgates = []
    # Go for all combinations of n two-qubit gates from the edgelist.
    for npairs in _itertools.combinations(edgelist, n):
        # Make a list of the qubits involved in the gates
        flat_list = [item for sublist in npairs for item in sublist]
        # If no qubit is involved in more than one gate we accept the combination
        if len(flat_list) == len(set(flat_list)):
            if aslabel:
                co2Qgates.append([_lbl.Label(gatename, pair) for pair in npairs])
            else:
                co2Qgates.append([gatename + ':' + pair[0] + ':' + pair[1] for pair in npairs])
    return co2Qgates
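Stripped of the library-specific labeling, the function above selects every size-n set of edges that shares no qubit, i.e. every n-edge matching. A minimal standalone sketch with a hypothetical edge list of qubit-label pairs:
import itertools

# Hypothetical edges of a 4-qubit line: Q0-Q1-Q2-Q3.
edgelist = [('Q0', 'Q1'), ('Q1', 'Q2'), ('Q2', 'Q3')]

def compatible_gate_sets(edgelist, n):
    accepted = []
    for npairs in itertools.combinations(edgelist, n):
        flat = [q for pair in npairs for q in pair]
        # Accept the combination only if no qubit appears in two gates.
        if len(flat) == len(set(flat)):
            accepted.append(npairs)
    return accepted

print(compatible_gate_sets(edgelist, 2))
# [(('Q0', 'Q1'), ('Q2', 'Q3'))]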
Example 9: connected_combos
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def connected_combos(self, possible_indices, size):
    count = 0
    for selected_inds in _itertools.combinations(possible_indices, size):
        if self.are_connected(selected_inds): count += 1
    return count

# def remove(self, node):
#     """ Remove all references to node """
#     for n, cxns in self._graph.iteritems():
#         try:
#             cxns.remove(node)
#         except KeyError:
#             pass
#     try:
#         del self._graph[node]
#     except KeyError:
#         pass
Example 10: complete
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def complete(self) -> _Completions:
    valid_values = self.valtype.valid_values
    if valid_values is None:
        return None

    out = []
    # Single value completions
    for value in valid_values:
        desc = valid_values.descriptions.get(value, "")
        out.append((json.dumps([value]), desc))

    combinables = self.combinable_values
    if combinables is None:
        combinables = list(valid_values)
    # Generate completions for every combination of combinable values
    for size in range(2, len(combinables) + 1):
        for combination in itertools.combinations(combinables, size):
            out.append((json.dumps(combination), ''))
    return out
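The completion code serializes every multi-value combination as a JSON list. The core of that loop, reduced to a standalone sketch with made-up flag values:
import itertools
import json

# Hypothetical combinable flag values.
combinables = ['always', 'pinned', 'multiple']

completions = []
for size in range(2, len(combinables) + 1):
    for combination in itertools.combinations(combinables, size):
        # Each completion is the JSON-encoded list of chosen values.
        completions.append(json.dumps(combination))

print(completions)
# ['["always", "pinned"]', '["always", "multiple"]', '["pinned", "multiple"]',
#  '["always", "pinned", "multiple"]']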
Example 11: generate_dataset
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def generate_dataset(sample_range, repo_path, num_speaker=2):
    '''
    A function to generate the dataset
    :param sample_range: range of the samples used to create the dataset
    :param repo_path: audio repository
    :param num_speaker: number of speakers to separate
    :return: X_data, y_data
    '''
    audio_path_list = generate_path_list(sample_range, repo_path)
    num_data = 0
    combinations = itertools.combinations(audio_path_list, num_speaker)
    for combo in combinations:
        num_data += 1
        generate_mix_sample(combo, num_speaker)
    print('number of the data generated: ', num_data)
Example 12: _LeastSimilarCoverage
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def _LeastSimilarCoverage(files, subset):
    """Returns pair of fonts having inconsistent coverage for a subset.

    Args:
      files: List of font files
      subset: Name of subset
    Returns:
      3 tuple of (file1, file2, number of codepoints difference)
    """
    worst = (None, None, 0)
    subsetcps = fonts.CodepointsInSubset(subset, True)
    for pair in itertools.combinations(files, 2):
        inconsistency = _InconsistentSubsetSupport(pair[0], pair[1], subsetcps)
        if inconsistency > worst[2]:
            worst = (pair[0], pair[1], inconsistency)
    return worst
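The helper scans every pair of files and keeps the pair with the largest inconsistency. The same pattern with plain sets standing in for font coverage (the file names and codepoints below are invented):
import itertools

# Hypothetical codepoint coverage per font file.
coverage = {
    'A.ttf': {1, 2, 3, 4},
    'B.ttf': {1, 2, 3},
    'C.ttf': {1, 9},
}

worst = (None, None, 0)
for f1, f2 in itertools.combinations(coverage, 2):
    # Count codepoints supported by exactly one of the two fonts.
    diff = len(coverage[f1] ^ coverage[f2])
    if diff > worst[2]:
        worst = (f1, f2, diff)

print(worst)   # ('A.ttf', 'C.ttf', 4)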
Example 13: _find_skeleton
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def _find_skeleton(self, data, variable_types):
    """
    For each pair of nodes, run a conditional independence test over
    larger and larger conditioning sets to try to find a set that
    d-separates the pair. If such a set exists, cut the edge between
    the nodes. If not, keep the edge.
    """
    self.separating_sets = {}
    if not self.max_k:
        self.max_k = len(self._g.nodes) + 1
    for N in range(self.max_k + 1):
        for (x, y) in list(self._g.edges()):
            x_neighbors = list(self._g.neighbors(x))
            y_neighbors = list(self._g.neighbors(y))
            z_candidates = list(set(x_neighbors + y_neighbors) - set([x, y]))
            for z in itertools.combinations(z_candidates, N):
                test = self.independence_test([y], [x], list(z), data, self.alpha)
                if test.independent():
                    self._g.remove_edge(x, y)
                    self.separating_sets[(x, y)] = z
                    break
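The key use of combinations here is enumerating candidate conditioning sets of growing size N. A stripped-down sketch of just that enumeration, with a stub independence test standing in for the real one:
import itertools

# Hypothetical neighbour candidates for an edge (x, y).
z_candidates = ['a', 'b', 'c']

def independent(x, y, z):
    # Stub: pretend x and y are d-separated once 'b' is conditioned on.
    return 'b' in z

for N in range(len(z_candidates) + 1):
    for z in itertools.combinations(z_candidates, N):
        if independent('x', 'y', z):
            print('separating set found:', z)   # ('b',)
            break
    else:
        # No separating set of size N; try larger conditioning sets.
        continue
    break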
Example 14: create_cell_generator
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def create_cell_generator(num_nodes):
    h_connections = [
        Bool(name='in_%d_%d' % (in_id, out_id))
        for (in_id, out_id) in itertools.combinations(range(num_nodes + 2), 2)
    ]
    cell_ops = [
        D(['conv1', 'conv3', 'max3'], name='node_%d' % i)
        for i in range(num_nodes)
    ]

    def generate(filters):
        return cell(
            lambda channels: mo.siso_sequential([
                conv2d(D([channels]), D([1])),
                batch_normalization(),
                relu()
            ]),
            lambda num_inputs, node_id, channels: intermediate_node_fn(
                num_inputs, node_id, channels, cell_ops),
            concat, h_connections, 5, filters)

    return generate
Example 15: test_count_nonzero_axis_consistent
# Required import: import itertools [as alias]
# Or: from itertools import combinations [as alias]
def test_count_nonzero_axis_consistent(self):
    # Check that the axis behaviour for valid axes in
    # non-special cases is consistent (and therefore
    # correct) by checking it against an integer array
    # that is then casted to the generic object dtype
    from itertools import combinations, permutations

    axis = (0, 1, 2, 3)
    size = (5, 5, 5, 5)
    msg = "Mismatch for axis: %s"

    rng = np.random.RandomState(1234)
    m = rng.randint(-100, 100, size=size)
    n = m.astype(object)

    for length in range(len(axis)):
        for combo in combinations(axis, length):
            for perm in permutations(combo):
                assert_equal(
                    np.count_nonzero(m, axis=perm),
                    np.count_nonzero(n, axis=perm),
                    err_msg=msg % (perm,))