This article collects typical usage examples of the Python class freppledb.execute.models.Task. If you are unsure what the Task class does or how to use it, the curated examples below should help.
The 15 code examples of the Task class shown below are listed in order of popularity.
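Before the individual examples, note the pattern that nearly all of them share: the management command either looks up an existing Task record (when a --task identifier was passed) and verifies that it is still queued in the "Waiting" state, or it creates a fresh Task, and then keeps updating task.status while the work progresses. The helper below is a minimal sketch condensed from the examples; the function name init_task and its expected_name parameter are illustrative placeholders, not part of frePPLe's API.

from datetime import datetime

from django.core.management.base import CommandError
from django.db import DEFAULT_DB_ALIAS

from freppledb.execute.models import Task


def init_task(options, expected_name, database=DEFAULT_DB_ALIAS, user=None):
    # Reuse the Task record passed via --task, or register a new one.
    now = datetime.now()
    if options.get('task'):
        try:
            task = Task.objects.all().using(database).get(pk=options['task'])
        except Task.DoesNotExist:
            raise CommandError("Task identifier not found")
        # Only a queued task of the expected type may be picked up.
        if task.started or task.finished or task.status != "Waiting" or task.name != expected_name:
            raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
    else:
        task = Task(name=expected_name, submitted=now, started=now, status='0%', user=user)
    task.save(using=database)
    return task

A command then periodically bumps task.status to a progress percentage and saves it again, and in its except/finally handling it records 'Done' or 'Failed' (together with task.message and task.finished) before a final task.save(using=database), as the examples below illustrate.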
Example 1: Command
class Command(BaseCommand):
help = '''
Update the ERP system with frePPLe planning information.
'''
# For the display in the execution screen
title = _('Export data to %(erp)s') % {'erp': 'erp'}
# For the display in the execution screen
index = 1500
requires_system_checks = False
def get_version(self):
return VERSION
def add_arguments(self, parser):
parser.add_argument(
'--user', help='User running the command'
)
parser.add_argument(
'--database', default=DEFAULT_DB_ALIAS,
help='Nominates the frePPLe database to load'
)
parser.add_argument(
'--task', type=int,
help='Task identifier (generated automatically if not provided)'
)
@staticmethod
def getHTML(request):
if 'freppledb.erpconnection' in settings.INSTALLED_APPS:
context = RequestContext(request)
template = Template('''
{% load i18n %}
<form role="form" method="post" action="{{request.prefix}}/execute/launch/erp2frepple/">{% csrf_token %}
<table>
<tr>
<td style="vertical-align:top; padding: 15px">
<button class="btn btn-primary" type="submit" value="{% trans "launch"|capfirst %}">{% trans "launch"|capfirst %}</button>
</td>
<td style="padding: 0px 15px;">{% trans "Export erp data to frePPLe." %}
</td>
</tr>
</table>
</form>
''')
return template.render(context)
else:
return None
def handle(self, **options):
'''
Uploads approved operationplans to the ERP system.
'''
# Select the correct frePPLe scenario database
self.database = options['database']
if self.database not in settings.DATABASES.keys():
raise CommandError("No database settings known for '%s'" % self.database)
self.cursor_frepple = connections[self.database].cursor()
# FrePPle user running this task
if options['user']:
try:
self.user = User.objects.all().using(self.database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
self.user = None
# FrePPLe task identifier
if options['task']:
try:
self.task = Task.objects.all().using(self.database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if self.task.started or self.task.finished or self.task.status != "Waiting" or self.task.name != 'frepple2erp':
raise CommandError("Invalid task identifier")
else:
now = datetime.now()
self.task = Task(name='frepple2erp', submitted=now, started=now, status='0%', user=self.user)
self.task.processid = os.getpid()
self.task.save(using=self.database)
try:
# Open database connection
print("Connecting to the ERP database")
with getERPconnection() as erp_connection:
self.cursor_erp = erp_connection.cursor(self.database)
try:
self.extractPurchaseOrders()
self.task.status = '33%'
self.task.save(using=self.database)
self.extractDistributionOrders()
self.task.status = '66%'
#......... (rest of the code omitted) .........
Example 2: handle
def handle(self, *args, **options):
# Pick up the options
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if not database in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try:
user = User.objects.all().using(database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
now = datetime.now()
transaction.enter_transaction_management(using=database)
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'load XML file':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='load XML file', submitted=now, started=now, status='0%', user=user)
task.arguments = ' '.join(['"%s"' % i for i in args])
task.save(using=database)
transaction.commit(using=database)
if not args:
raise CommandError("No XML input file given")
# Execute
# TODO: if frePPLe is available as a module, we don't really need to spawn another process.
os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
os.environ['FREPPLE_DATABASE'] = database
os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
if 'DJANGO_SETTINGS_MODULE' not in os.environ.keys():
os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip')):
# For the py2exe executable
os.environ['PYTHONPATH'] = os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip') + ';' + os.path.normpath(os.environ['FREPPLE_APP'])
else:
# Other executables
os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
cmdline = [ '"%s"' % i for i in args ]
cmdline.insert(0, 'frepple')
cmdline.append( '"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py') )
ret = os.system(' '.join(cmdline))
if ret:
raise Exception('Exit code of the batch run is %d' % ret)
# Task update
task.status = 'Done'
task.finished = datetime.now()
except Exception as e:
if task:
task.status = 'Failed'
task.message = '%s' % e
task.finished = datetime.now()
raise e
finally:
if task:
task.save(using=database)
try:
transaction.commit(using=database)
except:
pass
transaction.leave_transaction_management(using=database)
Example 3: handle
def handle(self, **options):
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
# Pick up the options
if 'start' in options:
start = options['start'] or '2011-1-1'
else:
start = '2011-1-1'
if 'end' in options:
end = options['end'] or '2019-1-1'
else:
end = '2019-1-1'
if 'weekstart' in options:
weekstart = int(options['weekstart'])
if weekstart < 0 or weekstart > 6:
raise CommandError("Invalid weekstart %s" % weekstart)
else:
weekstart = 1
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if database not in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try:
user = User.objects.all().using(database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
now = datetime.now()
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'generate buckets':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='generate buckets', submitted=now, started=now, status='0%', user=user, arguments="--start=%s --end=%s --weekstart=%s" % (start, end, weekstart))
task.save(using=database)
# Validate the date arguments
try:
curdate = datetime.strptime(start, '%Y-%m-%d')
enddate = datetime.strptime(end, '%Y-%m-%d')
except Exception as e:
raise CommandError("Date is not matching format YYYY-MM-DD")
with transaction.atomic(using=database, savepoint=False):
# Delete previous contents
connections[database].cursor().execute(
"delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
)
connections[database].cursor().execute(
"delete from common_bucket where name in ('year','quarter','month','week','day')"
)
# Create buckets
y = Bucket(name='year', description='Yearly time buckets', level=1)
q = Bucket(name='quarter', description='Quarterly time buckets', level=2)
m = Bucket(name='month', description='Monthly time buckets', level=3)
w = Bucket(name='week', description='Weekly time buckets', level=4)
d = Bucket(name='day', description='Daily time buckets', level=5)
y.save(using=database)
q.save(using=database)
m.save(using=database)
w.save(using=database)
d.save(using=database)
# Loop over all days in the chosen horizon
prev_year = None
prev_quarter = None
prev_month = None
prev_week = None
while curdate < enddate:
month = int(curdate.strftime("%m")) # an integer in the range 1 - 12
quarter = (month - 1) // 3 + 1 # an integer in the range 1 - 4
year = int(curdate.strftime("%Y"))
dayofweek = int(curdate.strftime("%w")) # day of the week, 0 = sunday, 1 = monday, ...
year_start = datetime(year, 1, 1)
year_end = datetime(year + 1, 1, 1)
week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
if week_start < year_start:
week_start = year_start
if week_end > year_end:
week_end = year_end
#......... (rest of the code omitted) .........
Example 4: handle
def handle(self, **options):
# Pick up options
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if not database in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try:
user = User.objects.all().using(database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
if 'models' in options and options['models']:
models = options['models'].split(',')
else:
models = None
now = datetime.now()
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
task.save(using=database)
# Create a database connection
cursor = connections[database].cursor()
# Get a list of all django tables in the database
tables = set(connections[database].introspection.django_table_names(only_existing=True))
# Validate the user list of tables
if models:
models2tables = set()
for m in models:
try:
x = m.split('.', 1)
x = apps.get_model(x[0], x[1])
if x in EXCLUDE_FROM_BULK_OPERATIONS:
continue
x = x._meta.db_table
if not x in tables:
raise
models2tables.add(x)
except Exception as e:
raise CommandError("Invalid model to erase: %s" % m)
tables = models2tables
else:
for i in EXCLUDE_FROM_BULK_OPERATIONS:
tables.discard(i._meta.db_table)
# Some tables need to be handled a bit special
if "setupmatrix" in tables:
tables.add("setuprule")
tables.discard('auth_group_permissions')
tables.discard('auth_permission')
tables.discard('auth_group')
tables.discard('django_session')
tables.discard('common_user')
tables.discard('common_user_groups')
tables.discard('common_user_user_permissions')
tables.discard('django_admin_log')
tables.discard('django_content_type')
tables.discard('execute_log')
tables.discard('common_scenario')
# Delete all records from the tables.
with transaction.atomic(using=database, savepoint=False):
if "common_bucket" in tables:
cursor.execute('update common_user set horizonbuckets = null')
for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
cursor.execute(stmt)
# Task update
task.status = 'Done'
task.finished = datetime.now()
task.save(using=database)
except Exception as e:
if task:
task.status = 'Failed'
task.message = '%s' % e
task.finished = datetime.now()
task.save(using=database)
raise CommandError('%s' % e)
Example 5: handle
def handle(self, *args, **options):
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
# Pick up options
if 'force' in options: force = options['force']
else: force = False
test = 'FREPPLE_TEST' in os.environ
if 'user' in options and options['user']:
try: user = User.objects.all().get(username=options['user'])
except: raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
# Initialize the task
now = datetime.now()
task = None
if 'task' in options and options['task']:
try: task = Task.objects.all().get(pk=options['task'])
except: raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='copy scenario', submitted=now, started=now, status='0%', user=user)
task.save()
# Synchronize the scenario table with the settings
Scenario.syncWithSettings()
# Validate the arguments
destinationscenario = None
try:
if len(args) != 2:
raise CommandError("Command takes exactly 2 arguments.")
task.arguments = "%s %s" % (args[0], args[1])
task.save()
source = args[0]
try:
sourcescenario = Scenario.objects.get(pk=source)
except:
raise CommandError("No source database defined with name '%s'" % source)
destination = args[1]
try:
destinationscenario = Scenario.objects.get(pk=destination)
except:
raise CommandError("No destination database defined with name '%s'" % destination)
if source == destination:
raise CommandError("Can't copy a schema on itself")
if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
raise CommandError("Source and destination scenarios have a different engine")
if sourcescenario.status != u'In use':
raise CommandError("Source scenario is not in use")
if destinationscenario.status != u'Free' and not force:
raise CommandError("Destination scenario is not free")
# Logging message - always logging in the default database
destinationscenario.status = u'Busy'
destinationscenario.save()
# Copying the data
if settings.DATABASES[source]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
ret = os.system("pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
settings.DATABASES[source]['USER'],
settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
settings.DATABASES[destination]['USER'],
settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
))
if ret: raise Exception('Exit code of the database copy command is %d' % ret)
elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.sqlite3':
# A plain copy of the database file
if test:
shutil.copy2(settings.DATABASES[source]['TEST_NAME'], settings.DATABASES[destination]['TEST_NAME'])
else:
shutil.copy2(settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['NAME'])
elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.mysql':
ret = os.system("mysqldump %s --password=%s --user=%s %s%s--quick --compress --extended-insert --add-drop-table | mysql %s --password=%s --user=%s %s%s" % (
test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
settings.DATABASES[source]['PASSWORD'],
settings.DATABASES[source]['USER'],
settings.DATABASES[source]['HOST'] and ("--host=%s " % settings.DATABASES[source]['HOST']) or '',
settings.DATABASES[source]['PORT'] and ("--port=%s " % settings.DATABASES[source]['PORT']) or '',
test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
settings.DATABASES[destination]['PASSWORD'],
settings.DATABASES[destination]['USER'],
settings.DATABASES[destination]['HOST'] and ("--host=%s " % settings.DATABASES[destination]['HOST']) or '',
settings.DATABASES[destination]['PORT'] and ("--port=%s " % settings.DATABASES[destination]['PORT']) or '',
))
if ret: raise Exception('Exit code of the database copy command is %d' % ret)
elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.oracle':
try:
#......... (rest of the code omitted) .........
Example 6: handle
def handle(self, **options):
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
# Pick up the options
if 'verbosity' in options:
verbosity = int(options['verbosity'])
else:
verbosity = 1
if 'cluster' in options:
cluster = int(options['cluster'])
else:
cluster = 100
if 'demand' in options:
demand = int(options['demand'])
else:
demand = 30
if 'forecast_per_item' in options:
forecast_per_item = int(options['forecast_per_item'])
else:
forecast_per_item = 50
if 'level' in options:
level = int(options['level'])
else:
level = 5
if 'resource' in options:
resource = int(options['resource'])
else:
resource = 60
if 'resource_size' in options:
resource_size = int(options['resource_size'])
else:
resource_size = 5
if 'components' in options:
components = int(options['components'])
else:
components = 200
if 'components_per' in options:
components_per = int(options['components_per'])
else:
components_per = 5
if components == 0:
components_per = 0
if 'deliver_lt' in options:
deliver_lt = int(options['deliver_lt'])
else:
deliver_lt = 30
if 'procure_lt' in options:
procure_lt = int(options['procure_lt'])
else:
procure_lt = 40
if 'currentdate' in options:
currentdate = options['currentdate'] or datetime.strftime(date.today(), '%Y-%m-%d')
else:
currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if not database in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try:
user = User.objects.all().using(database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
random.seed(100) # Initialize random seed to get reproducible results
now = datetime.now()
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='generate model', submitted=now, started=now, status='0%', user=user)
task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
"--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
cluster, demand, forecast_per_item, level, resource,
resource_size, components, components_per, deliver_lt, procure_lt
)
task.save(using=database)
transaction.commit(using=database)
# Pick up the startdate
try:
#......... (rest of the code omitted) .........
Example 7: handle
def handle(self, **options):
# Pick up the options
now = datetime.now()
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if database not in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try:
user = User.objects.all().using(database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
timestamp = now.strftime("%Y%m%d%H%M%S")
if database == DEFAULT_DB_ALIAS:
logfile = 'frepple-%s.log' % timestamp
else:
logfile = 'frepple_%s-%s.log' % (database, timestamp)
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'):
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
task.logfile = logfile
else:
task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile)
# Validate options
if 'constraint' in options:
constraint = int(options['constraint'])
if constraint < 0 or constraint > 15:
raise ValueError("Invalid constraint: %s" % options['constraint'])
else:
constraint = 15
if 'plantype' in options:
plantype = int(options['plantype'])
else:
plantype = 1
# Reset environment variables
# TODO avoid having to delete the environment variables. Use options directly?
PlanTaskRegistry.autodiscover()
for i in PlanTaskRegistry.reg:
if 'env' in options:
# Options specified
if i.label and i.label[0] in os.environ:
del os.environ[i.label[0]]
elif i.label:
# No options specified - default to activate them all
os.environ[i.label[0]] = '1'
# Set environment variables
if options['env']:
task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
for i in options['env'].split(','):
j = i.split('=')
if len(j) == 1:
os.environ[j[0]] = '1'
else:
os.environ[j[0]] = j[1]
else:
task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
if options['background']:
task.arguments += " --background"
# Log task
# Different from the other tasks the frepple engine will write the processid
task.save(using=database)
# Locate commands.py
import freppledb.common.commands
cmd = freppledb.common.commands.__file__
def setlimits():
import resource
if settings.MAXMEMORYSIZE:
resource.setrlimit(
resource.RLIMIT_AS,
(settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
)
if settings.MAXCPUTIME:
resource.setrlimit(
resource.RLIMIT_CPU,
(settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
)
# Limiting the file size is a bit tricky as this limit not only applies to the log
# file, but also to temp files during the export
#......... (rest of the code omitted) .........
Example 8: wrapTask
def wrapTask(request, action):
# Allow only post
if request.method != 'POST':
raise Exception('Only post requests allowed')
# Parse the posted parameters as arguments for an asynchronous task to add to the queue. TODO MAKE MODULAR WITH SEPARATE TASK CLASS
worker_database = request.database
now = datetime.now()
task = None
args = request.POST or request.GET
# A
if action in ('frepple_run', 'runplan'):
if not request.user.has_perm('auth.generate_plan'):
raise Exception('Missing execution privileges')
constraint = 0
for value in args.getlist('constraint'):
try:
constraint += int(value)
except:
pass
task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
task.arguments = "--constraint=%s --plantype=%s" % (constraint, args.get('plantype', 1))
env = []
for value in args.getlist('env'):
env.append(value)
if env:
task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
task.save(using=request.database)
# C
elif action in ('frepple_flush', 'empty'):
if not request.user.has_perm('auth.run_db'):
raise Exception('Missing execution privileges')
task = Task(name='empty', submitted=now, status='Waiting', user=request.user)
models = ','.join(args.getlist('models'))
if models:
task.arguments = "--models=%s" % (models)
task.save(using=request.database)
# D
elif action == 'loaddata':
if not request.user.has_perm('auth.run_db'):
raise Exception('Missing execution privileges')
task = Task(name='loaddata', submitted=now, status='Waiting', user=request.user, arguments=args['fixture'])
task.save(using=request.database)
# Also run the workflow upon loading of manufacturing_demo or distribution_demo
if (args['regenerateplan'] == 'true'):
active_modules = 'supply'
task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (active_modules,)
task.save(using=request.database)
# E
elif action in ('frepple_copy', 'scenario_copy'):
worker_database = DEFAULT_DB_ALIAS
if 'copy' in args:
if not request.user.has_perm('auth.copy_scenario'):
raise Exception('Missing execution privileges')
source = args.get('source', DEFAULT_DB_ALIAS)
worker_database = source
destination = args.getlist('destination')
force = args.get('force', False)
for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
arguments = "%s %s" % (source, sc.name)
if force:
arguments += ' --force'
if args.get(sc.name, 'off') == 'on' or sc.name in destination:
task = Task(name='scenario_copy', submitted=now, status='Waiting', user=request.user, arguments=arguments)
task.save(using=source)
elif 'release' in args:
# Note: release is immediate and synchronous.
if not request.user.has_perm('auth.release_scenario'):
raise Exception('Missing execution privileges')
for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
sc.status = 'Free'
sc.lastrefresh = now
sc.save(using=DEFAULT_DB_ALIAS)
if request.database == sc.name:
# Erasing the database that is currently selected.
request.prefix = ''
elif 'update' in args:
# Note: update is immediate and synchronous.
if not request.user.has_perm('auth.release_scenario'):
raise Exception('Missing execution privileges')
for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
if args.get(sc.name, 'off') == 'on':
sc.description = args.get('description', None)
sc.save(using=DEFAULT_DB_ALIAS)
else:
raise Exception('Invalid scenario task')
# G
elif action in ('frepple_createbuckets', 'createbuckets'):
if not request.user.has_perm('auth.run_db'):
raise Exception('Missing execution privileges')
task = Task(name='createbuckets', submitted=now, status='Waiting', user=request.user)
arguments = []
start = args.get('start', None)
if start:
arguments.append("--start=%s" % start)
end = args.get('end', None)
if end:
#......... (rest of the code omitted) .........
Example 9: LaunchTask
def LaunchTask(request, action):
# Allow only post
if request.method != 'POST':
raise Http404('Only post requests allowed')
# Parse the posted parameters as arguments for an asynchronous task to add to the queue. TODO MAKE MODULAR WITH SEPARATE TASK CLASS
worker_database = request.database
try:
now = datetime.now()
# A
if action == 'generate plan':
constraint = 0
for value in request.POST.getlist('constraint'):
try: constraint += int(value)
except: pass
task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get('plantype'))
task.save(using=request.database)
# Update the session object TODO REPLACE WITH PREFERENCE INFO
request.session['plantype'] = request.POST.get('plantype')
request.session['constraint'] = constraint
# B
elif action == 'generate model':
task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
"--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
)
task.save(using=request.database)
# C
elif action == 'empty database':
task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
task.save(using=request.database)
# D
elif action == 'load dataset':
task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user, arguments=request.POST['datafile'])
task.save(using=request.database)
# E
elif action == 'manage scenarios':
worker_database = DEFAULT_DB_ALIAS
if 'copy' in request.POST:
source = request.POST.get('source', DEFAULT_DB_ALIAS)
for sc in Scenario.objects.all():
if request.POST.get(sc.name,'off') == 'on' and sc.status == u'Free':
task = Task(name='copy scenario', submitted=now, status='Waiting', user=request.user, arguments="%s %s" % (source, sc.name))
task.save()
elif 'release' in request.POST:
# Note: release is immediate and synchronous.
for sc in Scenario.objects.all():
if request.POST.get(sc.name,'off') == u'on' and sc.status != u'Free':
sc.status = u'Free'
sc.lastrefresh = now
sc.save()
if request.database == sc.name:
# Erasing the database that is currently selected.
request.prefix = ''
elif 'update' in request.POST:
# Note: update is immediate and synchronous.
for sc in Scenario.objects.all():
if request.POST.get(sc.name, 'off') == 'on':
sc.description = request.POST.get('description',None)
sc.save()
else:
raise Http404('Invalid scenario task')
# F
elif action == 'backup database':
task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
task.save(using=request.database)
# G
elif action == 'generate buckets':
task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
task.arguments = "--start=%s --end=%s --weekstart=%s" % (
request.POST['start'], request.POST['end'], request.POST['weekstart']
)
task.save(using=request.database)
# H
elif action == 'exportworkbook':
return exportWorkbook(request)
# I
elif action == 'importworkbook':
return importWorkbook(request)
# J
elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
task.arguments = "--delta=%s" % request.POST['delta']
task.save(using=request.database)
# K
elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
task.save(using=request.database)
# L
elif action == 'openerp_import' and 'freppledb.openerp' in settings.INSTALLED_APPS:
task = Task(name='OpenERP import', submitted=now, status='Waiting', user=request.user)
task.arguments = "--delta=%s" % request.POST['delta']
task.save(using=request.database)
# M
elif action == 'openerp_export' and 'freppledb.openerp' in settings.INSTALLED_APPS:
task = Task(name='OpenERP export', submitted=now, status='Waiting', user=request.user)
#......... (rest of the code omitted) .........
Example 10: handle
def handle(self, **options):
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
# Pick up options
force = options['force']
test = 'FREPPLE_TEST' in os.environ
if options['user']:
try:
user = User.objects.all().get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
# Synchronize the scenario table with the settings
Scenario.syncWithSettings()
# Initialize the task
source = options['source']
try:
sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
except:
raise CommandError("No source database defined with name '%s'" % source)
now = datetime.now()
task = None
if 'task' in options and options['task']:
try:
task = Task.objects.all().using(source).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_copy', 'scenario_copy'):
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='scenario_copy', submitted=now, started=now, status='0%', user=user)
task.processid = os.getpid()
task.save(using=source)
# Validate the arguments
destination = options['destination']
destinationscenario = None
try:
task.arguments = "%s %s" % (source, destination)
if options['description']:
task.arguments += '--description="%s"' % options['description'].replace('"', '\\"')
if force:
task.arguments += " --force"
task.save(using=source)
try:
destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=destination)
except:
raise CommandError("No destination database defined with name '%s'" % destination)
if source == destination:
raise CommandError("Can't copy a schema on itself")
if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
raise CommandError("Source and destination scenarios have a different engine")
if sourcescenario.status != 'In use':
raise CommandError("Source scenario is not in use")
if destinationscenario.status != 'Free' and not force:
raise CommandError("Destination scenario is not free")
# Logging message - always logging in the default database
destinationscenario.status = 'Busy'
destinationscenario.save(using=DEFAULT_DB_ALIAS)
# Copying the data
# Commenting out the next line is a little more secure, but requires you to create a .pgpass file.
if settings.DATABASES[source]['PASSWORD']:
os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
if os.name == 'nt':
# On windows restoring with pg_restore over a pipe is broken :-(
cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s"
else:
cmd = "pg_dump -Fc %s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
commandline = cmd % (
settings.DATABASES[source]['USER'] and ("-U %s " % settings.DATABASES[source]['USER']) or '',
settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
settings.DATABASES[destination]['USER'] and ("-U %s " % settings.DATABASES[destination]['USER']) or '',
settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
)
with subprocess.Popen(commandline, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) as p:
try:
task.processid = p.pid
task.save(using=source)
p.wait()
except:
p.kill()
p.wait()
# Consider the destination database free again
destinationscenario.status = 'Free'
#......... (rest of the code omitted) .........
Example 11: handle
def handle(self, **options):
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
# Pick up options
if 'database' in options:
database = options['database'] or DEFAULT_DB_ALIAS
else:
database = DEFAULT_DB_ALIAS
if not database in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % database )
if 'user' in options and options['user']:
try: user = User.objects.all().using(database).get(username=options['user'])
except: raise CommandError("User '%s' not found" % options['user'] )
else:
user = None
now = datetime.now()
transaction.enter_transaction_management(using=database)
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try: task = Task.objects.all().using(database).get(pk=options['task'])
except: raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
task.save(using=database)
transaction.commit(using=database)
# Create a database connection
cursor = connections[database].cursor()
# Get a list of all django tables in the database
tables = set(connections[database].introspection.django_table_names(only_existing=True))
# Some tables need to be handled a bit special
cursor.execute('update common_user set horizonbuckets = null')
tables.discard('auth_group_permissions')
tables.discard('auth_permission')
tables.discard('auth_group')
tables.discard('django_session')
tables.discard('common_user')
tables.discard('common_user_groups')
tables.discard('common_user_user_permissions')
tables.discard('django_content_type')
tables.discard('execute_log')
tables.discard('execute_scenario')
transaction.commit(using=database)
# Delete all records from the tables.
for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
cursor.execute(stmt)
transaction.commit(using=database)
# SQLite specials
if settings.DATABASES[database]['ENGINE'] == 'django.db.backends.sqlite3':
cursor.execute('vacuum') # Shrink the database file
# Task update
task.status = 'Done'
task.finished = datetime.now()
except Exception as e:
if task:
task.status = 'Failed'
task.message = '%s' % e
task.finished = datetime.now()
raise e
finally:
if task: task.save(using=database)
try: transaction.commit(using=database)
except: pass
settings.DEBUG = tmp_debug
transaction.leave_transaction_management(using=database)
Example 12: handle
def handle(self, **options):
# Pick up the options
now = datetime.now()
self.database = options['database']
if self.database not in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % self.database )
if options['user']:
try:
self.user = User.objects.all().using(self.database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
self.user = None
timestamp = now.strftime("%Y%m%d%H%M%S")
if self.database == DEFAULT_DB_ALIAS:
logfile = 'importworkbook-%s.log' % timestamp
else:
logfile = 'importworkbook_%s-%s.log' % (self.database, timestamp)
task = None
try:
setattr(_thread_locals, 'database', self.database)
# Initialize the task
if options['task']:
try:
task = Task.objects.all().using(self.database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_importworkbook', 'importworkbook'):
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='importworkbook', submitted=now, started=now, status='0%', user=self.user)
task.arguments = ' '.join(options['file'])
task.save(using=self.database)
all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
try:
with transaction.atomic(using=self.database):
# Find all models in the workbook
for file in filename:
wb = load_workbook(filename=file, read_only=True, data_only=True)
models = []
for ws_name in wb.sheetnames:
# Find the model
model = None
contenttype_id = None
for m, ct in all_models:
if matchesModelName(ws_name, m):
model = m
contenttype_id = ct
break
if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
print(force_text(_("Ignoring data in worksheet: %s") % ws_name))
# yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
elif not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
# Check permissions
print(force_text(_("You don't permissions to add: %s") % ws_name))
# yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
else:
deps = set([model])
GridReport.dependent_models(model, deps)
models.append( (ws_name, model, contenttype_id, deps) )
# Sort the list of models, based on dependencies between models
models = GridReport.sort_models(models)
print('197----', models)
# Process all rows in each worksheet
for ws_name, model, contenttype_id, dependencies in models:
print(force_text(_("Processing data in worksheet: %s") % ws_name))
# yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>'
# yield ('<div class="table-responsive">'
# '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
numerrors = 0
numwarnings = 0
firsterror = True
ws = wb[ws_name]
for error in parseExcelWorksheet(model, ws, user=self.user, database=self.database, ping=True):
if error[0] == DEBUG:
# Yield some result so we can detect disconnect clients and interrupt the upload
# yield ' '
continue
if firsterror and error[0] in (ERROR, WARNING):
print('%s %s %s %s %s%s%s' % (
capfirst(_("worksheet")), capfirst(_("row")),
capfirst(_("field")), capfirst(_("value")),
capfirst(_("error")), " / ", capfirst(_("warning"))
))
# yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
# capfirst(_("worksheet")), capfirst(_("row")),
# capfirst(_("field")), capfirst(_("value")),
# capfirst(_("error")), " / ", capfirst(_("warning"))
# )
firsterror = False
if error[0] == ERROR:
print('%s %s %s %s %s: %s' % (
ws_name,
error[1] if error[1] else '',
error[2] if error[2] else '',
#......... (rest of the code omitted) .........
Example 13: handle
def handle(self, **options):
'''
Uploads approved operationplans to the ERP system.
'''
# Select the correct frePPLe scenario database
self.database = options['database']
if self.database not in settings.DATABASES.keys():
raise CommandError("No database settings known for '%s'" % self.database)
self.cursor_frepple = connections[self.database].cursor()
# FrePPle user running this task
if options['user']:
try:
self.user = User.objects.all().using(self.database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
self.user = None
# FrePPLe task identifier
if options['task']:
try:
self.task = Task.objects.all().using(self.database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if self.task.started or self.task.finished or self.task.status != "Waiting" or self.task.name != 'frepple2erp':
raise CommandError("Invalid task identifier")
else:
now = datetime.now()
self.task = Task(name='frepple2erp', submitted=now, started=now, status='0%', user=self.user)
self.task.processid = os.getpid()
self.task.save(using=self.database)
try:
# Open database connection
print("Connecting to the ERP database")
with getERPconnection() as erp_connection:
self.cursor_erp = erp_connection.cursor(self.database)
try:
self.extractPurchaseOrders()
self.task.status = '33%'
self.task.save(using=self.database)
self.extractDistributionOrders()
self.task.status = '66%'
self.task.save(using=self.database)
self.extractManufacturingOrders()
self.task.status = '100%'
self.task.save(using=self.database)
# Optional extra planning output the ERP might be interested in:
# - planned delivery date of sales orders
# - safety stock (Enterprise Edition only)
# - reorder quantities (Enterprise Edition only)
# - forecast (Enterprise Edition only)
self.task.status = 'Done'
finally:
self.cursor_erp.close()
except Exception as e:
self.task.status = 'Failed'
self.task.message = 'Failed: %s' % e
self.task.finished = datetime.now()
self.task.processid = None
self.task.save(using=self.database)
self.cursor_frepple.close()
Example 14: handle
def handle(self, **options):
# Pick up the options
if 'verbosity' in options: self.verbosity = int(options['verbosity'] or '1')
else: self.verbosity = 1
if 'user' in options: user = options['user']
else: user = ''
if 'database' in options: self.database = options['database'] or DEFAULT_DB_ALIAS
else: self.database = DEFAULT_DB_ALIAS
if not self.database in settings.DATABASES.keys():
raise CommandError("No database settings known for '%s'" % self.database )
if 'delta' in options: self.delta = float(options['delta'] or '3650')
else: self.delta = 3650
self.date = datetime.now()
# Make sure the debug flag is not set!
# When it is set, the django database wrapper collects a list of all sql
# statements executed and their timings. This consumes plenty of memory
# and cpu time.
tmp_debug = settings.DEBUG
settings.DEBUG = False
now = datetime.now()
ac = transaction.get_autocommit(using=self.database)
transaction.set_autocommit(False, using=self.database)
task = None
try:
# Initialize the task
if 'task' in options and options['task']:
try: task = Task.objects.all().using(self.database).get(pk=options['task'])
except: raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name != 'Odoo import':
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
else:
task = Task(name='Odoo import', submitted=now, started=now, status='0%', user=user,
arguments="--delta=%s" % self.delta)
task.save(using=self.database)
transaction.commit(using=self.database)
# Find the connector class
# We look for a module called "odoo_export" in each of the installed
# applications, and expect to find a class called connector in it
connector = None
for app in reversed(settings.INSTALLED_APPS):
try:
connector = getattr(import_module('%s.odoo_import' % app),'Connector')
except ImportError as e:
# Silently ignore if it's the module which isn't found in the app
if str(e) != 'No module named odoo_import': raise e
if not connector:
raise CommandError("No odoo_import connector found")
# Instantiate the connector and upload all data
connector(task, self.delta, self.database, self.verbosity).run()
# Log success
task.status = 'Done'
task.finished = datetime.now()
except Exception as e:
if task:
task.status = 'Failed'
task.message = '%s' % e
task.finished = datetime.now()
raise e
finally:
if task: task.save(using=self.database)
try: transaction.commit(using=self.database)
except: pass
settings.DEBUG = tmp_debug
transaction.set_autocommit(ac, using=self.database)
Example 15: handle
def handle(self, *args, **options):
# Pick up the options
now = datetime.now()
self.database = options['database']
if self.database not in settings.DATABASES:
raise CommandError("No database settings known for '%s'" % self.database )
if options['user']:
try:
self.user = User.objects.all().using(self.database).get(username=options['user'])
except:
raise CommandError("User '%s' not found" % options['user'] )
else:
self.user = None
timestamp = now.strftime("%Y%m%d%H%M%S")
if self.database == DEFAULT_DB_ALIAS:
logfile = 'exporttofolder-%s.log' % timestamp
else:
logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)
try:
handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8')
# handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
logger.addHandler(handler)
logger.propagate = False
except Exception as e:
print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))
task = None
errors = 0
try:
# Initialize the task
if options['task']:
try:
task = Task.objects.all().using(self.database).get(pk=options['task'])
except:
raise CommandError("Task identifier not found")
if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_exporttofolder', 'exporttofolder'):
raise CommandError("Invalid task identifier")
task.status = '0%'
task.started = now
task.logfile = logfile
else:
task = Task(name='exporttofolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile)
task.arguments = ' '.join(['"%s"' % i for i in args])
task.processid = os.getpid()
task.save(using=self.database)
# Execute
if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
if not os.path.isdir(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')):
try:
os.makedirs(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export'))
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
logger.info("%s Started export to folder\n" % datetime.now())
cursor = connections[self.database].cursor()
task.status = '0%'
task.save(using=self.database)
i = 0
cnt = len(self.statements)
# Calling all the pre-sql statements
for stmt in self.pre_sql_statements:
try:
logger.info("Executing pre-statement '%s'" % stmt)
cursor.execute(stmt)
logger.info("%s record(s) modified" % cursor.rowcount)
except:
errors += 1
logger.error("An error occurred when executing statement '%s'" % stmt)
for cfg in self.statements:
# Validate filename
filename = cfg.get('filename', None)
if not filename:
raise Exception("Missing filename in export configuration")
folder = cfg.get('folder', None)
if not folder:
raise Exception("Missing folder in export configuration for %s" % filename)
logger.info("%s Started export of %s" % (datetime.now(), filename))
# Make sure export folder exists
exportFolder = os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], folder)
if not os.path.isdir(exportFolder):
os.makedirs(exportFolder)
try:
reportclass = cfg.get('report', None)
sql = cfg.get('sql', None)
if reportclass:
# Export from report class
# Create a dummy request
#......... (rest of the code omitted) .........