This page collects typical usage examples of the Python method gurobipy.Model.setAttr. If you have been wondering what Model.setAttr does, how to call it, or what it looks like in real code, the selected example below may help. You can also look further into usage examples of the containing class, gurobipy.Model.
One code example of the Model.setAttr method is shown below.
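Before the example, here is a minimal sketch of the call in question: Model.setAttr sets a named attribute on the model. The model, variables, and objective below are illustrative only and are not taken from the example that follows.

from gurobipy import Model, GRB

m = Model("sketch")
m.setAttr("ModelSense", GRB.MINIMIZE)  # same effect as m.ModelSense = 1 (minimize)
x = m.addVar(lb=0.0, name="x")
y = m.addVar(lb=0.0, name="y")
m.update()                             # make the new variables usable in constraints
m.addConstr(x + y >= 1.0)
m.setObjective(x + 2.0 * y)
m.optimize()

The example below passes the attribute name as "modelSense" with the value 1, which likewise selects minimization.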
Example 1: __init__
# Required import: from gurobipy import Model [as alias]
# Or: from gurobipy.Model import setAttr [as alias]
# Note: this excerpt also relies on imports that are not shown here, most likely
# os, random, logging, numpy as np, a Dirichlet sampler (e.g. numpy.random.dirichlet),
# and LinExpr, GRB and Model (aliased to LPModel) from gurobipy; ClusterPrior,
# ProdsFC and MembsFC are project-specific factor classes defined elsewhere.
class HardEM:
    # class parameters and their default values
    EM_RESTARTS = 16
    EM_ITERATION_LIMIT = 30
    LP_TIME_LIMIT = 60
    LP_ITERATION_LIMIT = 50 * (10 ** 3)
    LP_VERBOSITY = 2
    DFLT_NPARTS = 5
    DEFLT_TAU = 0.7
    CLUSTER_SIGMA = 0.6
    PROD_PRIOR_ALPHA = 10
    CLUSTER_PRIOR_ALPHA = 10
    DENOM_THRESH = 3

    def __init__(self, author_graph, author_product_map, nparts=DFLT_NPARTS, init_partition=None, TAU=DEFLT_TAU, parallel=False):
        self.parts = range(nparts)
        self.TAU = TAU
        self.author_graph = author_graph
        self.author_product_map = author_product_map
        # If run in parallel, seed the RNGs with the process id; otherwise all
        # instances would get the same time-based seed.
        if parallel:
            random.seed(os.getpid())
            np.random.seed(os.getpid())
        # node ids must be the consecutive integers 0..n-1
        assert set(author_graph) == set(range(len(author_graph)))
        self._lp_inited = False
        # init hidden vars
        if init_partition:
            self.partition = np.array(init_partition, dtype=np.int8)
            self.rand_init_param()
            self.m_step()  # so the thetas have a value
        else:
            self.partition = np.zeros(len(self.author_graph), dtype=np.int8)
            self.rand_init_param()

    @staticmethod
    def _relabel_to_int_product_ids(mapping):
        # Relabel arbitrary product ids to consecutive integers, e.g. (illustrative values)
        # {'a': ['p9', 'p3'], 'b': ['p3']} -> {'a': array([0, 1]), 'b': array([1])}
        new_map = {}
        label_map = {}
        for k, vs in mapping.items():
            new_vs = []
            for v in vs:
                if v not in label_map:
                    label_map[v] = len(label_map)
                new_vs.append(label_map[v])
            new_map[k] = np.array(new_vs)
        return new_map

    def rand_init_param(self):
        logging.debug('Random param with seed: %s' % os.getpid())
        self.factors = [list() for _ in self.parts]
        # init cluster prior
        for p, prob in enumerate(dirichlet([HardEM.CLUSTER_PRIOR_ALPHA] * len(self.parts))):
            self.factors[p].append(ClusterPrior(prob))
        # init other singleton potential factors
        for p in self.parts:
            factors = self.factors[p]
            # factors.append(Binary_FC('isRealName', self.author_graph))
            # factors.append(Norm_FC('revLen', self.author_graph, (3, 7)))
            factors.append(ProdsFC('prProds', self.author_graph, self.author_product_map))
            factors.append(MembsFC('prMembs', self.author_graph))

    def _init_LP(self):
        if self._lp_inited:
            return
        logging.debug('Init LP')
        self.lp = LPModel('estep')
        self.lp.setAttr("modelSense", 1)  # 1 selects minimization
        self.alpha = {}
        beta2 = {}
        beta3 = {}
        # instantiate vars
        logging.debug('Init LP - create vars')
        for a in self.author_graph:
            self.alpha[a] = {}
            for p in self.parts:
                self.alpha[a][p] = self.lp.addVar(lb=0.0)
        for a, b in self.author_graph.edges():
            beta2[(a, b)] = self.lp.addVar()
            beta3[(a, b)] = {}
            for p in self.parts:
                beta3[(a, b)][p] = self.lp.addVar(lb=0.0)
        # integrate the added variables into the model
        self.lp.update()
        # add constraints once during this init
        # the alphas of each author form an indicator (probability) vector
        logging.debug('Init LP - indiv constraints')
        ones_arr = [1.0] * len(self.parts)
        for a in self.author_graph:
            self.lp.addConstr(LinExpr(ones_arr, self.alpha[a].values()), GRB.EQUAL, 1.0)
        # beta2 is half the sum of the beta3s
        logging.debug('Init LP - pair constraints')
        pt_five_array = [0.5] * len(self.parts)
        for a, b in self.author_graph.edges():
            self.lp.addConstr(LinExpr(pt_five_array, beta3[(a, b)].values()), GRB.EQUAL, beta2[(a, b)])
            for p in self.parts:
                # the next two constraints linearize |alpha[a][p] - alpha[b][p]| <= beta3[(a, b)][p]
                self.lp.addConstr(LinExpr([1.0, -1.0], [self.alpha[a][p], self.alpha[b][p]]), GRB.LESS_EQUAL, beta3[(a, b)][p])
                self.lp.addConstr(LinExpr([-1.0, 1.0], [self.alpha[a][p], self.alpha[b][p]]), GRB.LESS_EQUAL, beta3[(a, b)][p])
#.........part of the code is omitted here.........
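For orientation, here is a self-contained sketch of the same constraint pattern that _init_LP builds, reduced to a single author pair (a, b) and two parts. The data and the objective are illustrative assumptions, not part of the excerpt; the sketch only restates the pattern above with plain gurobipy objects.

from gurobipy import Model, LinExpr, GRB

m = Model('estep_sketch')
m.setAttr("modelSense", 1)  # minimization, as in the excerpt
parts = range(2)
alpha_a = {p: m.addVar(lb=0.0) for p in parts}  # membership indicators of author a
alpha_b = {p: m.addVar(lb=0.0) for p in parts}  # membership indicators of author b
beta2 = m.addVar()
beta3 = {p: m.addVar(lb=0.0) for p in parts}
m.update()
# each author's alphas sum to one
m.addConstr(LinExpr([1.0] * len(parts), list(alpha_a.values())), GRB.EQUAL, 1.0)
m.addConstr(LinExpr([1.0] * len(parts), list(alpha_b.values())), GRB.EQUAL, 1.0)
# beta2 is half the sum of the beta3s
m.addConstr(LinExpr([0.5] * len(parts), list(beta3.values())), GRB.EQUAL, beta2)
# beta3[p] bounds |alpha_a[p] - alpha_b[p]| from above
for p in parts:
    m.addConstr(LinExpr([1.0, -1.0], [alpha_a[p], alpha_b[p]]), GRB.LESS_EQUAL, beta3[p])
    m.addConstr(LinExpr([-1.0, 1.0], [alpha_a[p], alpha_b[p]]), GRB.LESS_EQUAL, beta3[p])
m.setObjective(alpha_a[0] + beta2)  # illustrative objective only
m.optimize()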