本文整理汇总了Python中pxStats.lib.StatsPaths.StatsPaths类的典型用法代码示例。如果您正苦于以下问题:Python StatsPaths类的具体用法?Python StatsPaths怎么用?Python StatsPaths使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了StatsPaths类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: getTimeOfLastUpdateInLogs
def getTimeOfLastUpdateInLogs(self):
    """
    @summary : Returns the time of the last automatic update found
               in the update logs, in ISO format.
    @return  : ISO time of the newest log entry.  When no entry
               exists, today's midnight is returned instead, so that
               a first update is forced.
    """
    # Fallback value used when no prior update has been logged.
    timeOfLastUpdate = StatsDateLib.getIsoTodaysMidnight(StatsDateLib.getCurrentTimeInIsoformat())

    paths = StatsPaths()
    paths.setPaths()
    logsFolder = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"

    if not os.path.isdir(logsFolder):
        os.makedirs(logsFolder)

    entries = os.listdir(logsFolder)
    if entries:
        # Entry names are ISO dates with spaces replaced by "_", so the
        # lexicographically greatest name is also the most recent one.
        timeOfLastUpdate = os.path.basename(max(entries)).replace("_", " ")

    return timeOfLastUpdate
示例2: getPreviousMonitoringJob
def getPreviousMonitoringJob( self, currentTime ):
    """
    @summary : Gets the time of the previous monitoring job from
               the pickle file left by the previous run.
    @param currentTime : Current time in ISO format, used to build
                         the fallback value.
    @return : Time of the previous monitoring job, or today's
              midnight (derived from currentTime) when the pickle
              file does not exist yet.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    # Renamed from 'file' so as not to shadow the builtin.
    fileName = "%spreviousMonitoringJob" % statsPaths.STATSMONITORING

    if os.path.isfile( fileName ):
        fileHandle = open( fileName, "r" )
        try:
            # try/finally guarantees the handle is closed even when
            # pickle.load raises (the original leaked it in that case).
            previousMonitoringJob = pickle.load( fileHandle )
        finally:
            fileHandle.close()
    else:
        previousMonitoringJob = StatsDateLib.getIsoTodaysMidnight( currentTime )

    return previousMonitoringJob
示例3: __getAutomaticUpdatesDoneDuringTimeSpan
def __getAutomaticUpdatesDoneDuringTimeSpan( self, startTime, endtime ):
    """
    @summary : Lists the automatic-update log entries whose names
               fall within [startTime, endtime].
    @param startTime : Start time of the span in iso format.
    @param endtime   : End time of the span in iso format.
    @return : List of the matching log entry names.
    """
    # Entry file names use "_" where the ISO date has a space.
    startTime = startTime.replace( " ", "_" )
    endtime = endtime.replace( " ", "_" )

    def notAfterEndTime(x):
        return x <= endtime

    def notBeforeStartTime(x):
        return x >= startTime

    paths = StatsPaths()
    paths.setPaths()

    # Bug fix : os.listdir() was being called with an invalid
    # 'updatesDirectory' keyword argument, which raises TypeError.
    updatesDirectory = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/"
    updates = os.listdir( updatesDirectory )
    updates = filter( notAfterEndTime, updates )
    updates = filter( notBeforeStartTime, updates )

    return updates
示例4: addAutomaticUpdateToLogs
def addAutomaticUpdateToLogs( self, timeOfUpdateInIsoFormat, currentUpdateFrequency = None ):
"""
@summary : Writes a new file in the log folder containing
the current update frequency.
@param timeOfUpdateInIsoFormat: Time that the entries name will sport.
"""
paths = StatsPaths()
paths.setPaths()
fileName = paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/" + str( timeOfUpdateInIsoFormat ).replace( " ", "_" )
#Safety to make sure
if not os.path.isdir( os.path.dirname( fileName ) ):
os.makedirs( os.path.dirname( fileName ), 0777 )
if currentUpdateFrequency == None :
currentUpdateFrequency = self.getCurrentUpdateFrequency()
CpickleWrapper.save( currentUpdateFrequency, fileName )
allEntries = os.listdir(paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/")
allEntries.sort()
entriesToRemove = allEntries[ :-self.numberOfLogsToKeep]
for entrytoRemove in entriesToRemove:
os.remove(paths.STATSTEMPAUTUPDTLOGS + self.updateType + "/" + entrytoRemove )
示例5: restoreDatabaseUpdateTimes
def restoreDatabaseUpdateTimes( timeToRestore, currentTime, nbBackupsToKeep ):
"""
@summary : Copy all databases into a folder sporting the data of the backup.
@param timeToRestore : Time of the DB backups to set as current DB.
@param currentTime : Time of the call to the script.
@param nbBackupsToKeep : total number of backups to keep.
"""
statsPaths = StatsPaths()
statsPaths.setPaths()
source = statsPaths.STATSDBUPDATESBACKUPS + "/%s" %timeToRestore
destination = statsPaths.STATSCURRENTDBUPDATES
#Archive current Database
backupRRDDatabases.backupDatabaseUpdateTimes( currentTime, nbBackupsToKeep, foldersToPreserve = [ source ] )
#restore desired
status, output = commands.getstatusoutput( "rm -r %s" %( destination ) )
os.makedirs(destination)
status, output = commands.getstatusoutput( "cp -rf %s/* %s" %( source, destination ) )
print output
示例6: __updateCsvFiles
def __updateCsvFiles( self, type, clusters, cost ):
    """
    @summary : Generates the rx and tx csv files for yesterday for
               all clusters, then feeds the resulting file through
               the cost filter for the web pages.
    @param type     : daily | weekly | monthly | yearly
    @param clusters : List of currently running source clusters.
    @param cost     : Total operational cost for the period specified
                      by the type.
    @return : None
    """
    paths = StatsPaths()
    paths.setPaths()

    # Maps the report type to csvDataConversion.py's command-line flag.
    typeParameters = { "daily" : "-d", "weekly" : "-w", "monthly" : "-m", "yearly" : "-y" }

    # Generate the reception (rx) csv file.
    output = commands.getoutput( paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s' %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage ) )
    #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f rx --language %s' %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )

    # Generate the transmission (tx) csv file.
    # NOTE(review) : this overwrites 'output', so only the tx run's
    # output is used to derive the file name below — presumably
    # intentional, but confirm the rx file needs no filtering.
    output = commands.getoutput( paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s' %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage ) )
    #print paths.STATSBIN + 'csvDataConversion.py --includeGroups %s --machines "%s" --machinesAreClusters --fixedPrevious --date "%s" -f tx --language %s' %( typeParameters[type], clusters, self.timeOfRequest, self.outputLanguage )

    # Apply the cost filter to the generated csv file, if any was produced.
    fileName = self.__getFileNameFromExecutionOutput(output)
    if fileName != "":
        commands.getstatusoutput(paths.STATSWEBPAGESGENERATORS + 'csvDataFiltersForWebPages.py -c %s -f %s ' %(cost, fileName) )
示例7: updateFilesAssociatedWithMachineTags
def updateFilesAssociatedWithMachineTags( tagsNeedingUpdates, machineParameters ):
"""
@summary : For all the tags for wich
a machine was change we rename all the
files associated with that tag.
@param tagsNeedingUpdates: List of tags that have been modified
since the last call.
"""
paths = StatsPaths()
paths.setPaths()
previousParameters = getMachineParametersFromPreviousCall()
for tag in tagsNeedingUpdates:
previousCombinedMachineNames = ""
previousCombinedMachineNames = previousCombinedMachineNames.join( [ x for x in previousParameters.getMachinesAssociatedWith( tag ) ] )
currentCombinedMachineNames = ""
currentCombinedMachineNames = currentCombinedMachineNames.join( [ x for x in machineParameters.getMachinesAssociatedWith( tag ) ])
output = commands.getoutput( "%sfileRenamer.py -o %s -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames ) )
print "%sfileRenamer.py -o %s -n %s --overrideConfirmation" %( paths.STATSTOOLS, previousCombinedMachineNames, currentCombinedMachineNames )
print output
示例8: transferLogFiles
def transferLogFiles():
"""
@summary : Log files will not be tansferred if local machine
is not designed to be a pickling machine.
If log files are to be transferred, they will be straight
from the source."
"""
paths = StatsPaths()
paths.setPaths()
parameters = StatsConfigParameters()
machineParameters = MachineConfigParameters()
machineParameters.getParametersFromMachineConfigurationFile()
parameters.getAllParameters()
individualSourceMachines = machineParameters.getMachinesAssociatedWithListOfTags( parameters.sourceMachinesTags )
individualPicklingMachines = machineParameters.getMachinesAssociatedWithListOfTags( parameters.picklingMachines )
for sourceMachine,picklingMachine in map( None, individualSourceMachines, individualPicklingMachines ) :
if picklingMachine == LOCAL_MACHINE :#pickling to be done here
userName = machineParameters.getUserNameForMachine(sourceMachine)
remoteLogPath = paths.getPXPathFromMachine( paths.PXLOG, sourceMachine, userName )
print "rsync -avzr --delete-before -e ssh %[email protected]%s:%s %s%s/ " %( userName , sourceMachine,remoteLogPath , paths.STATSLOGS, sourceMachine )
output = commands.getoutput( "rsync -avzr --delete-before -e ssh %[email protected]%s:%s %s%s/ " %( userName , sourceMachine, remoteLogPath, paths.STATSLOGS, sourceMachine ) )
print output
示例9: cleanUp
def cleanUp( configParameters, currentTime, daysOfPicklesToKeep ):
"""
@summary: Based on current time and frequencies contained
within the time parameters, we will run
the cleaners that need to be run.
@param configParameters: StatsConfigParameters instance.
@param currenTime: currentTime in seconds since epoch format.
"""
paths = StatsPaths()
paths.setPaths()
updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "picklecleaner")
if updateManager.updateIsRequired(currentTime) :
output = commands.getoutput( paths.STATSTOOLS + "pickleCleaner.py %s" %int(daysOfPicklesToKeep) )
print paths.STATSTOOLS + "pickleCleaner.py" + " " + str( daysOfPicklesToKeep )
updateManager.addAutomaticUpdateToLogs( currentTime )
updateManager = AutomaticUpdatesManager(configParameters.nbAutoUpdatesLogsToKeep, "generalCleaner")
if updateManager.updateIsRequired(currentTime) :
commands.getstatusoutput( paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf" )
print paths.STATSTOOLS + "clean_dir.plx" + " " + paths.STATSETC + "clean_dir.conf"
updateManager.addAutomaticUpdateToLogs( currentTime )
示例10: __getDocFilesToLinkTo
def __getDocFilesToLinkTo(self, language):
    """
    @summary : Gathers and returns the names of all the documentation
               files currently available for the given language.
    @param language : Language suffix the file names must carry.
    @return : Sorted list of file names to link to.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths( self.mainLanguage )

    folder = statsPaths.STATSDOC + "html/"
    pattern = "*_%s.html" % (language)

    # Keep only the entries whose base name carries the language suffix.
    filesToLinkTo = [ os.path.basename(entry)
                      for entry in os.listdir(folder)
                      if fnmatch.fnmatch( os.path.basename(entry), pattern ) ]
    filesToLinkTo.sort()

    return filesToLinkTo
示例11: saveList
def saveList( self, user, clients ):
"""
@summary : Saves list.
@note : Will include modification made in updateFileInlist method
@param clients : Client to wich the file is related(used to narrow down searchs)
@param user : Name of the client, person, etc.. wich has a relation with the
file.
"""
statsPaths = StatsPaths()
statsPaths.setPaths()
directory = statsPaths.STATSDATA + "fileAccessVersions/"
combinedName = ""
for client in clients:
combinedName = combinedName + client
fileName = combinedName + "_" + user
if not os.path.isdir( directory ):
os.makedirs( directory, mode=0777 )
#create directory
completeFilename = directory + fileName
#print "saving %s" %completeFilename
CpickleWrapper.save( object = self.savedFileList, filename = completeFilename )
示例12: getClientsCurrentFileList
def getClientsCurrentFileList( self, clients ):
    """
    @summary : Collects all the pickle files associated with the given
               list of clients.
    @note : A list of individual machine names is used here — not the
            combined name — since we need every pickle that will take
            part in a merger.
    @return : Dictionary mapping each found file name to its mtime.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    fileNames = []
    for client in clients :
        # Every sub-folder of the client's pickle tree may contain
        # hourly pickles named "*_??".
        for folder in glob.glob( statsPaths.STATSPICKLES + client + "/*/*" ):
            if os.path.isdir( folder ):
                fileNames.extend( glob.glob( folder + "/" + "*_??" ) )

    for fileName in fileNames :
        self.currentClientFileList[fileName] = os.path.getmtime( fileName )

    return self.currentClientFileList
示例13: getSavedList
def getSavedList( self, user, clients ):
    """
    @summary : Loads and returns the previously saved file list for
               the given user/clients combination.
    @param user    : Name of the client, person, etc. related to the files.
    @param clients : Clients used to build the combined file name.
    @return : The saved file list ({} when none exists).
    """
    self.savedFileList = {}

    statsPaths = StatsPaths()
    statsPaths.setPaths()

    directory = statsPaths.STATSDATA + "fileAccessVersions/"
    fileName = "".join(clients) + "_" + user

    try :
        self.savedFileList = CpickleWrapper.load( directory + fileName )
        # Bug fix : the attribute was misspelled 'savedFileLis', which
        # raised AttributeError (silently swallowed by the except below)
        # and left savedFileList set to None when the pickle held None.
        if self.savedFileList is None :
            self.savedFileList = {}
    except: # if file does not exist (kept broad: CpickleWrapper's error types are not visible here)
        pass

    return self.savedFileList
示例14: updatePickledTimes
def updatePickledTimes( dateToSet = "2006-10-23 09:00:00" ):
    """
    @summary : For every pickled times file, sets every key to the
               desired date and writes the file back.
    @param dateToSet : ISO date to store under every key.
    """
    statsPaths = StatsPaths()
    statsPaths.setPaths()

    folder = statsPaths.STATSPICKLESTIMEOFUPDATES

    for baseName in os.listdir(folder):
        # Bug fix : os.listdir returns bare names; without joining them
        # to 'folder', os.path.isfile() (and the opens below) only work
        # when the process happens to run from inside 'folder'.
        fileName = os.path.join( folder, baseName )
        if os.path.isfile( fileName ):
            fileHandle = open( fileName, "r" )
            try:
                pickledTimes = pickle.load( fileHandle )
            finally:
                fileHandle.close()

            for key in pickledTimes.keys():
                pickledTimes[key] = dateToSet

            fileHandle = open( fileName, "w" )
            try:
                pickle.dump( pickledTimes, fileHandle )
            finally:
                fileHandle.close()
示例15: main
def main():
    """
    @summary : Small test case scenario that allows for unit-like
               testing of the LanguageTools class : resolves the
               translation file for the configured language and runs
               a few translations through it.
    """
    # Read the application's main language from the config file.
    configParameters = StatsConfigParameters()
    configParameters.getAllParameters()
    language = configParameters.mainApplicationLanguage

    paths = StatsPaths()
    paths.setBasicPaths()

    print "Language set in config file : %s" %language

    print "Test1 : (Should show that the proper translation file will be used) "
    fileName = LanguageTools.getTranslationFileName( language, paths.STATSLIB + 'StatsPlotter' )
    print "Translation file to be used : %s " %( fileName )

    print "Test2 : (Should translate the word into the specified language) "
    translator = LanguageTools.getTranslator( fileName )
    print "Translation for bytecount : %s" %( translator("bytecount") )

    # Same translation, obtained through the module-based helper.
    print "Test3 : (Should be the same result as test 2) "
    translator = LanguageTools.getTranslatorForModule( paths.STATSLIB + 'StatsPlotter', language )
    print "Translation for bytecount : %s" %( translator("bytecount") )

    # Cross-language term translation (fr -> en).
    print "Test4 : Unless translation changes, this should print 'filecount' "
    print "Result : ", LanguageTools.translateTerm("nbreDeFichiers", "fr", "en", paths.STATSLIB + "StatsPlotter.py" )