本文整理汇总了Python中rdw_helpers.joinPaths函数的典型用法代码示例。如果您正苦于以下问题:Python joinPaths函数的具体用法?Python joinPaths怎么用?Python joinPaths使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。
在下文中一共展示了joinPaths函数的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: restoreFileOrDir
def restoreFileOrDir(repoRoot, dirPath, filename, restoreDate, useZip):
    """Restore a file or directory from an rdiff-backup repository.

    Returns the path to the restored file; a restored directory is packaged
    into a single archive (.zip when useZip is true, else .tar.gz).  The
    caller owns the returned file AND its containing temp directory and is
    responsible for deleting both after use.
    """
    # Validate the quoted repository-relative path before doing any work.
    quotedPath = rdiffQuotedPath(repoRoot).getQuotedPath(joinPaths(dirPath, filename))
    checkRepoPath(repoRoot, quotedPath)

    # The repository root has no usable basename, so give it one.
    outputName = "(root)" if filename == "/" else filename

    sourcePath = joinPaths(repoRoot, dirPath, filename)
    restoreFlag = "--restore-as-of=" + str(restoreDate.getSeconds())
    # TODO: make so this includes the username
    outputPath = joinPaths(tempfile.mkdtemp(), outputName)

    results = rdw_helpers.execute("rdiff-backup", restoreFlag, sourcePath, outputPath)
    if results['exitCode'] != 0 or not os.access(outputPath, os.F_OK):
        message = results['stderr']
        if not message:
            message = 'rdiff-backup claimed success, but did not restore anything. This indicates a bug in rdiffWeb. Please report this to a developer.'
        raise UnknownError('Unable to restore! rdiff-backup output:\n' + message)

    if os.path.isdir(outputPath):
        # Package a restored directory so the caller receives one file.
        if useZip:
            rdw_helpers.recursiveZipDir(outputPath, outputPath + ".zip")
            rdw_helpers.removeDir(outputPath)
            outputPath = outputPath + ".zip"
        else:
            rdw_helpers.recursiveTarDir(outputPath, outputPath + ".tar.gz")
            rdw_helpers.removeDir(outputPath)
            outputPath = outputPath + ".tar.gz"
    return outputPath
示例2: checkRepoPath
def checkRepoPath(repoRoot, filePath):
    """Validate that filePath points at data belonging to the rdiff-backup
    repository rooted at repoRoot.

    Raises DoesNotExistError when repoRoot is not a repository, or when the
    path exists neither in the live tree nor in the increments directory.
    Raises AccessDeniedError when any component of the path is a symlink
    (which could otherwise be used to escape the repository).
    """
    # Make sure repoRoot is a valid rdiff-backup repository
    dataPath = joinPaths(repoRoot, rdiffDataDirName)
    if not os.access(dataPath, os.F_OK) or not os.path.isdir(dataPath):
        raise DoesNotExistError()
    # Make sure there are no symlinks in the path; walk every component
    # from the leaf up to the root.
    pathToCheck = joinPaths(repoRoot, filePath)
    while True:
        pathToCheck = pathToCheck.rstrip("/")
        if os.path.islink(pathToCheck):
            raise AccessDeniedError()
        # 'tail' (was 'file') avoids shadowing the builtin.
        (pathToCheck, tail) = os.path.split(pathToCheck)
        if not tail:
            break
    # Make sure that the folder/file exists somewhere - either in the current folder, or in the incrementsDir
    if not os.access(joinPaths(repoRoot, filePath), os.F_OK):
        (parentFolder, filename) = os.path.split(joinPaths(repoRoot, rdiffIncrementsDirName, filePath))
        try:
            increments = os.listdir(parentFolder)
        except OSError:
            increments = []
        # List comprehension instead of filter(): on Python 3 filter()
        # returns a lazy object that is always truthy, so the emptiness
        # test below would never raise.
        increments = [x for x in increments if x.startswith(filename)]
        if not increments:
            raise DoesNotExistError()
示例3: getDirEntries
def getDirEntries(self):
    """ returns dictionary of dir entries, keyed by dir name """
    entriesDict = {}
    # First, we grab a dir listing of the target, setting entry attributes
    for entryName in self.entries:
        # Never surface the rdiff-backup bookkeeping directory itself.
        if entryName == rdiffDataDirName: continue
        entryPath = joinPaths(self.repo, self.dirPath, entryName)
        # lstat()[6] is st_size; lstat (not stat) so symlinks are not followed.
        newEntry = dirEntry(entryName, os.path.isdir(entryPath), os.lstat(entryPath)[6], True,
                            [self._getLastChangedBackupTime(entryName)])
        entriesDict[entryName] = newEntry
    # Go through the increments dir. If we find any files that didn't exist in dirPath (i.e. have been deleted), add them
    for entryFile in self.incrementEntries:
        entry = incrementEntry(entryFile)
        entryName = entry.getFilename()
        if entry.shouldShowIncrement() or entry.isMissingIncrement():
            entryDate = entry.getDate()
            if not entry.isSnapshotIncrement():
                if entry.isMissingIncrement():
                    # A "missing" increment records an absence; the actual
                    # change is attributed to the first backup after its date.
                    entryDate = self._getFirstBackupAfterDate(entry.getDate())
                else:
                    entryDate = entry.getDate()
            if not entryName in entriesDict.keys():
                # Entry exists only as an increment: the file was deleted
                # at some point, so it has no live counterpart (exists=False,
                # size 0).
                entryPath = joinPaths(self.repo, rdiffIncrementsDirName, self.dirPath, entryName)
                newEntry = dirEntry(entryName, os.path.isdir(entryPath), 0, False, [entryDate])
                entriesDict[entryName] = newEntry
            else:
                if not entryDate in entriesDict[entryName].changeDates:
                    # Keep changeDates sorted as dates are merged in.
                    bisect.insort_left(entriesDict[entryName].changeDates, entryDate)
    return entriesDict
示例4: testGetDirEntries
def testGetDirEntries(self):
    """For every backup test tree, verify that getDirEntries reports a
    change date for a file exactly when that file's content actually
    changed between backup states."""
    tests = self.getBackupTests()
    for testDir in tests:
        # Get a list of backup entries for the root folder
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        entries = getDirEntries(rdiffDestDir, "/")
        # Go back through all backup states and make sure that the backup entries match the files that exist
        origStateDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origStateDir)
        backupStates.sort(lambda x, y: cmp(x, y))
        for backupState in backupStates:
            backupTime = rdw_helpers.rdwTime()
            backupTime.initFromString(backupState)
            # Go through each file, and make sure we have a backup entry for this file and date
            origStateDir = joinPaths(self.masterDirPath, testDir, backupState)
            files = self.getBackupStates(origStateDir)
            for file in files:
                origFilePath = joinPaths(origStateDir, file)
                entry = getMatchingDirEntry(entries, file)
                # Build a diagnostic message up front so any assert below
                # can explain which test/file/date combination failed.
                assertionErrorMessage = "backupTime "+backupTime.getDisplayString()+" not found in backup entries for backup test \""+testDir+"\" for file \""+file+"\". Returned changeDates:"
                for changeDate in entry.changeDates:
                    assertionErrorMessage = assertionErrorMessage + "\n"+changeDate.getDisplayString()
                assertionErrorMessage = assertionErrorMessage + "\nIncrements dir: "+str(os.listdir(joinPaths(rdiffDestDir, "rdiff-backup-data", "increments")))
                for entryDate in entry.changeDates:
                    if backupTime.getSeconds() == entryDate.getSeconds():
                        # NOTE: fileChangedBetweenBackups returns True when
                        # the old and new contents compare EQUAL (see its
                        # implementation), i.e. True means "unchanged".
                        # A change date recorded for an unchanged file is a
                        # failure.
                        if self.fileChangedBetweenBackups(testDir, entry.name, backupState, backupStates):
                            assert False, assertionErrorMessage
                        break
                else:
                    # for/else: no change date matched this backup time, so
                    # the file must really be unchanged; fail otherwise.
                    if not self.fileChangedBetweenBackups(testDir, entry.name, backupState, backupStates):
                        assert False or False, assertionErrorMessage
                assert os.path.isdir(origFilePath) == entry.isDir
示例5: fileChangedBetweenBackups
def fileChangedBetweenBackups(self, backupTest, filename, lastBackup, allBackups):
    """Compare filename's content in lastBackup against its content in the
    immediately preceding backup state.

    NOTE: despite the name, this returns True when the contents compare
    EQUAL (i.e. the file did NOT change).  Callers in this file rely on
    that inverted meaning, so it is preserved for compatibility.  Returns
    False when there is no earlier revision or the file did not exist in
    it.
    """
    # List comprehension instead of filter(): filter(...)[-1] fails on
    # Python 3, where filter() returns a non-subscriptable iterator.
    prevRevisions = [x for x in allBackups if x < lastBackup]
    if not prevRevisions:
        return False
    oldVersion = prevRevisions[-1]
    oldFilePath = joinPaths(self.masterDirPath, backupTest, oldVersion, filename)
    newFilePath = joinPaths(self.masterDirPath, backupTest, lastBackup, filename)
    if not os.access(oldFilePath, os.F_OK):
        return False
    # Close both handles deterministically instead of leaking them until
    # garbage collection.
    with open(oldFilePath, "r") as oldFile:
        with open(newFilePath, "r") as newFile:
            return oldFile.read() == newFile.read()
示例6: restoreFile
def restoreFile(repoRoot, dirPath, filename, restoreDate):
    """Restore a single file as of restoreDate.

    Returns a path to the restored file.  The caller is responsible for
    deleting the file, as well as its containing temp dir, after use.
    Raises UnknownError when rdiff-backup produced no output file.
    """
    checkRepoPath(repoRoot, joinPaths(dirPath, filename))
    sourcePath = joinPaths(repoRoot, dirPath, filename)
    # TODO: make so this includes the username
    destPath = joinPaths(tempfile.mkdtemp(), filename)
    argv = ["rdiff-backup",
            "--restore-as-of=" + str(restoreDate.getSeconds()),
            sourcePath,
            destPath]
    # Run rdiff-backup synchronously; success is judged by the presence of
    # the output file rather than the process exit status.
    os.spawnvp(os.P_WAIT, argv[0], argv)
    if not os.access(destPath, os.F_OK):
        raise UnknownError()
    return destPath
示例7: getBackupHistory
def getBackupHistory(self, numLatestEntries=-1, earliestDate=None, latestDate=None, includeInProgress=True):
    """Returns a list of backupHistoryEntry's.
    earliestDate and latestDate are inclusive.  At most numLatestEntries
    entries are returned unless it is -1 (no limit).  In-progress backups
    are skipped when includeInProgress is False."""
    # Get a listing of error log files, and use that to build backup history.
    # List comprehension instead of filter() so .reverse() below also works
    # on Python 3.
    curEntries = [x for x in self.dirEntries if x.startswith("error_log.")]
    curEntries.reverse()
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(self.pathQuoter, entryFile)
        # compare local times because of discrepency between client/server time zones
        if earliestDate and entry.getDate().getLocalSeconds() < earliestDate.getLocalSeconds():
            continue
        if latestDate and entry.getDate().getLocalSeconds() > latestDate.getLocalSeconds():
            continue
        try:
            if entry.isCompressed():
                errors = gzip.open(joinPaths(self.rdiffDir, entryFile), "r").read()
            else:
                errors = open(joinPaths(self.rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsFile = self._getSessionStatsFile(entry)
            session_stats = open(joinPaths(self.rdiffDir, sessionStatsFile), "r").read()
            fileSize = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
            incrementSize = re.compile("IncrementFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: the stats file exists but is missing the expected
            # field; treat that the same as a missing/unreadable file rather
            # than crashing.
            fileSize = 0
            incrementSize = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.inProgress = self._backupIsInProgress(entry.getDate())
        if not includeInProgress and newEntry.inProgress:
            continue
        if newEntry.inProgress:
            # An in-progress backup's error log is incomplete; suppress it.
            newEntry.errors = ""
        else:
            newEntry.errors = errors
        newEntry.size = int(fileSize)
        newEntry.incrementSize = int(incrementSize)
        # Source listing is newest-first; insert at the front to return
        # oldest-first.
        entries.insert(0, newEntry)
        if numLatestEntries != -1 and len(entries) == numLatestEntries:
            return entries
    return entries
示例8: _getBackupHistory
def _getBackupHistory(repoRoot, numLatestEntries=-1, earliestDate=None, latestDate=None, includeInProgress=True):
    """Returns a list of backupHistoryEntry's.
    earliestDate and latestDate are inclusive; at most numLatestEntries
    entries are returned unless it is -1 (no limit)."""
    checkRepoPath(repoRoot, "")
    # Get a listing of error log files, and use that to build backup history.
    # List comprehension instead of filter(): filter(...).sort() fails on
    # Python 3, where filter() returns an iterator without .sort().
    rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    curEntries = [x for x in os.listdir(rdiffDir) if x.startswith("error_log.")]
    curEntries.sort()
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(entryFile)
        # compare local times because of discrepency between client/server time zones
        if earliestDate and entry.getDate().getLocalSeconds() < earliestDate.getLocalSeconds():
            continue
        if latestDate and entry.getDate().getLocalSeconds() > latestDate.getLocalSeconds():
            continue
        try:
            if entry.isCompressed():
                errors = gzip.open(joinPaths(rdiffDir, entryFile), "r").read()
            else:
                errors = open(joinPaths(rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsPath = getSessionStatsFile(rdiffDir, entry)
            session_stats = open(sessionStatsPath, "r").read()
            expression = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: stats file present but missing the field; treat
            # the same as an unreadable file instead of crashing.
            expression = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.inProgress = backupIsInProgress(repoRoot, entry.getDate())
        if newEntry.inProgress:
            # An in-progress backup's error log is incomplete; suppress it.
            newEntry.errors = ""
        else:
            newEntry.errors = errors
        newEntry.size = int(expression)
        entries.append(newEntry)
    # The newest entry is last; drop it when a backup is running and the
    # caller asked to exclude in-progress backups.
    if len(entries) > 0 and not includeInProgress and backupIsInProgressForRepo(repoRoot):
        entries.pop()
    if numLatestEntries != -1:
        entries = entries[-numLatestEntries:]
    return entries
示例9: testGetBackupHistory
def testGetBackupHistory(self):
    """Verify getBackupHistory against the known backup-state trees:
    one history entry per state, correct last-entry date, and correct
    since-date filtering (including timezone handling)."""
    tests = self.getBackupTests()
    for testDir in tests:
        # Get a list of backup entries for the root folder
        origBackupDir = joinPaths(self.masterDirPath, testDir)
        backupStates = self.getBackupStates(origBackupDir)
        backupStates.sort(lambda x, y: cmp(x, y))
        rdiffDestDir = joinPaths(self.destRoot, testDir)
        entries = getBackupHistory(rdiffDestDir)
        assert len(entries) == len(backupStates)
        backupNum = 0
        for backup in backupStates:
            origBackupStateDir = joinPaths(origBackupDir, backup)
            totalBackupSize = 0
            # lstat()[6] is st_size; sums only the immediate children.
            for file in os.listdir(origBackupStateDir):
                totalBackupSize = totalBackupSize + os.lstat(joinPaths(origBackupStateDir, file))[6]
            #TODO: fix this to handle subdirs
            #assert totalBackupSize == entries[backupNum].size, "Calculated: "+str(totalBackupSize)+" Reported: "+str(entries[backupNum].size)+" State: "+str(backupNum)
            backupNum = backupNum + 1
        # Test that the last backup entry works correctly
        lastEntry = getLastBackupHistoryEntry(rdiffDestDir)
        lastBackupTime = rdw_helpers.rdwTime()
        lastBackupTime.initFromString(backupStates[-1])
        assert lastEntry.date == lastBackupTime
        # Test that timezone differences are ignored
        historyAsOf = lastEntry.date.getUrlString()
        # if "+" in historyAsOf:
        #     historyAsOf = historyAsOf.replace("+", "-")
        # else:
        #     historyAsOf = historyAsOf[:19] + "+" + historyAsOf[20:]
        lastBackupTime = rdw_helpers.rdwTime()
        lastBackupTime.initFromString(historyAsOf)
        entries = getBackupHistorySinceDate(rdiffDestDir, lastBackupTime)
        assert len(entries) == 1
        # Test that no backups are returned one second after the last backup.
        # NOTE(review): this rewrites index 18 of the URL string, assumed to
        # be the ones digit of the seconds field (and assumed not to be 9
        # already) -- confirm against rdwTime.getUrlString's format.
        historyAsOf = historyAsOf[:18] + "1" + historyAsOf[19:]
        postBackupTime = rdw_helpers.rdwTime()
        postBackupTime.initFromString(historyAsOf)
        assert lastBackupTime.getLocalSeconds() + 1 == postBackupTime.getLocalSeconds()
        entries = getBackupHistorySinceDate(rdiffDestDir, postBackupTime)
        assert len(entries) == 0
示例10: getSessionStatsFile
def getSessionStatsFile(rdiffDataDir, entry):
    """Attempts to get the sessions statistics file for a given backup. Tries the following to find a match:
    1. The date with no timezone information
    2. The date, 1 hour in the past, with no timezone information
    3. The date with timezone information
    Returns the first existing candidate path, or "" if none exists."""
    def candidateDates():
        # Generator keeps the original laziness: later date strings are
        # only computed if the earlier candidates do not exist.
        yield entry.getDateStringNoTZ()
        yield entry.getDateStringNoTZ(-60*60)
        yield entry.getDateString()
    for dateString in candidateDates():
        candidatePath = joinPaths(rdiffDataDir, getSessionStatsFileName(dateString))
        if os.access(candidatePath, os.F_OK):
            return candidatePath
    return ""
示例11: getParmsForPage
def getParmsForPage(self, root, repos):
    """Build template parameters for the repository browse page.

    Repositories whose last-backup history can be read go into "repos";
    those that raise librdiff.FileError go into "badrepos" with a size of
    "0" and a date of "Error".
    """
    repoList = []
    repoErrors = []
    for userRepo in repos:
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            repoSize = "0"
            repoDate = "Error"
            # Unreadable repos are reported separately.
            targetList = repoErrors
        else:
            repoSize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                repoSize = "In Progress"
            repoDate = repoHistory.date.getDisplayString()
            targetList = repoList
        targetList.append(
            {
                "repoName": userRepo,
                "repoSize": repoSize,
                "repoDate": repoDate,
                "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
                "repoHistoryUrl": self.buildHistoryUrl(userRepo),
            }
        )
    return {"title": "browse", "repos": repoList, "badrepos": repoErrors}
示例12: getBackupHistory
def getBackupHistory(repoRoot, numLatestEntries=-1):
    """Returns a list of backupHistoryEntry's, newest first.
    numLatestEntries limits the result to the most recent backups
    (-1 means no limit)."""
    checkRepoPath(repoRoot, "")
    # Get a listing of error log files, and use that to build backup history.
    # List comprehension instead of filter(): filter(...).sort() fails on
    # Python 3, where filter() returns an iterator.
    rdiffDir = joinPaths(repoRoot, rdiffDataDirName)
    curEntries = [x for x in os.listdir(rdiffDir) if x.startswith("error_log.")]
    curEntries.sort()
    if numLatestEntries != -1:
        assert numLatestEntries > 0
        curEntries = curEntries[-numLatestEntries:]
    curEntries.reverse()
    entries = []
    for entryFile in curEntries:
        entry = incrementEntry(entryFile)
        try:
            # Only gunzip the log when it is actually compressed.  The old
            # code called gzip.open unconditionally, so reading any
            # uncompressed error log always failed and reported
            # "[Unable to read errors file.]".
            if entry.isCompressed():
                errors = gzip.open(joinPaths(rdiffDir, entryFile), "r").read()
            else:
                errors = open(joinPaths(rdiffDir, entryFile), "r").read()
        except IOError:
            errors = "[Unable to read errors file.]"
        try:
            sessionStatsPath = getSessionStatsFile(rdiffDir, entry)
            session_stats = open(sessionStatsPath, "r").read()
            fileSize = re.compile("SourceFileSize ([0-9]+) ").findall(session_stats)[0]
        except (IOError, IndexError):
            # IndexError: stats file present but missing the field; treat
            # the same as an unreadable file instead of crashing.
            fileSize = 0
        newEntry = backupHistoryEntry()
        newEntry.date = entry.getDate()
        newEntry.errors = errors
        newEntry.size = int(fileSize)
        entries.append(newEntry)
    return entries
示例13: getParmsForPage
def getParmsForPage(self, root, repos):
    """Build template parameters for the browse page, one entry per
    repository.  Repositories whose history cannot be read are included
    with placeholder values and flagged 'failed'."""
    repoList = []
    for reponame in repos:
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, reponame))
            reposize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            reposizeinbytes = repoHistory.size
            if repoHistory.inProgress:
                reposize = "En Progreso"
            repoDate = repoHistory.date.getDisplayString()
            repodateinseconds = repoHistory.date.getLocalSeconds()
            failed = False
        except librdiff.FileError:
            # Keep the repo visible but marked as failed.
            logging.exception("No se puede obtener informacion previa de %s" % reponame)
            reposize = "0"
            reposizeinbytes = 0
            repoDate = "Error"
            repodateinseconds = 0
            failed = True
        entry = {
            "reponame": reponame,
            "reposize": reposize,
            "reposizeinbytes": reposizeinbytes,
            "repodate": repoDate,
            "repodateinseconds": repodateinseconds,
            "repoBrowseUrl": self.buildBrowseUrl(reponame, "/", False),
            "repoHistoryUrl": self.buildHistoryUrl(reponame),
            'failed': failed,
        }
        repoList.append(entry)
    self._sortLocations(repoList)
    return { "title" : "browse", "repos" : repoList }
示例14: sendEmails
def sendEmails(self):
    """For each user, find repositories whose last successful backup is
    older than that repo's configured maximum age, and email the user a
    notification listing the stale repositories."""
    for user in self.userDB.getUserList():
        userRepos = self.userDB.getUserRepoPaths(user)
        oldRepos = []
        for repo in userRepos:
            # A max age of 0 means "no age limit" for this repo.
            maxDaysOld = self.userDB.getRepoMaxAge(user, repo)
            if maxDaysOld != 0:
                # get the last backup date
                try:
                    lastBackup = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(self.userDB.getUserRoot(user), repo), False)
                except librdiff.FileError:
                    pass # Skip repos that have never been successfully backed up
                else:
                    if lastBackup:
                        # Oldest acceptable backup time: midnight UTC,
                        # maxDaysOld days ago.
                        oldestGoodBackupTime = rdw_helpers.rdwTime()
                        oldestGoodBackupTime.initFromMidnightUTC(-maxDaysOld)
                        if lastBackup.date < oldestGoodBackupTime:
                            oldRepos.append({"repo" : repo, "lastBackupDate" : lastBackup.date.getDisplayString(), "maxAge" : maxDaysOld })
        if oldRepos:
            # Only contact the user when at least one repo is stale.
            userEmailAddress = self.userDB.getUserEmail(user)
            emailText = rdw_helpers.compileTemplate("email_notification.txt", repos=oldRepos, sender=self._getEmailSender(), user=user)
            # Address field may hold several addresses separated by ";".
            session = smtplib.SMTP(self._getEmailHost())
            session.login(self._getEmailUsername(), self._getEmailPassword())
            smtpresult = session.sendmail(self._getEmailSender(), userEmailAddress.split(";"), emailText)
            session.quit()
示例15: getParmsForPage
def getParmsForPage(self, root, repos):
    """Build template parameters for the browse page.

    Every repository produces one entry; entries whose history cannot be
    read carry placeholder values and 'failed': True.  After sorting, an
    'altRow' flag is set on alternating rows for display shading.
    """
    repoList = []
    for userRepo in repos:
        try:
            repoHistory = librdiff.getLastBackupHistoryEntry(rdw_helpers.joinPaths(root, userRepo))
        except librdiff.FileError:
            repoSize = "0"
            repoDate = "Error"
            failed = True
        else:
            repoSize = rdw_helpers.formatFileSizeStr(repoHistory.size)
            if repoHistory.inProgress:
                repoSize = "In Progress"
            repoDate = repoHistory.date.getDisplayString()
            failed = False
        repoList.append({
            "repoName": userRepo,
            "repoSize": repoSize,
            "repoDate": repoDate,
            "repoBrowseUrl": self.buildBrowseUrl(userRepo, "/", False),
            "repoHistoryUrl": self.buildHistoryUrl(userRepo),
            'failed': failed,
        })
    self._sortLocations(repoList)
    # Make second pass through list, setting the 'altRow' attribute:
    # even indices are "alt" rows.
    for rowIndex, rowEntry in enumerate(repoList):
        rowEntry['altRow'] = (rowIndex % 2 == 0)
    return { "title" : "browse", "repos" : repoList }