本文整理汇总了Python中PyDbLite.Base.create方法的典型用法代码示例。如果您正苦于以下问题:Python Base.create方法的具体用法?Python Base.create怎么用?Python Base.create使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类PyDbLite.Base
的用法示例。
在下文中一共展示了Base.create方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Python代码示例。
示例1: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
def __init__(self, sID, onePiece):
    """Load or initialize the per-day stock databases for stock *sID*.

    Opens the shared history/current databases, then loads today's record
    for this stock if present; otherwise inserts history data via
    self.insertHistory(). Also ensures today's impact database exists.
    """
    today = datetime.date.today()
    self.sID = sID
    # NOTE(review): presumably the "one piece" data-feed object -- confirm.
    self.opp = onePiece
    # Shared databases at fixed Windows paths.
    self.historyDB = Base("F://alfStock//"+"alf123"+'.history')
    self.currentDB = Base("F://alfStock//"+"alf123"+'.current')
    self.historyDB.open()
    self.currentDB.open()
    # Per-day databases keyed by today's date.
    db = Base("F://alfStock//"+str(today)+'.db')
    impactDB = Base("F://alfStock//"+str(today)+'.yv')
    if db.exists():
        db.open()
        recs = [ r for r in db if r['sid'] == self.sID ]
        if len(recs) > 0:
            # Record for this stock already present: load it.
            self.history = recs[0]['history']
            self.sCurrent = recs[0]['current']
        else:
            # Day file exists but this stock has no record yet.
            print "already existed: ", len(db)
            self.insertHistory(db)
    else:
        # First run today: create the schema, then insert.
        db.create('sid','history', 'current')
        self.insertHistory(db)
    if impactDB.exists():
        self.idb = impactDB
    else:
        # U: up; D: down
        impactDB.create('sid','UpOrDown')
        impactDB.open()
        impactDB.commit()
        self.idb = impactDB
示例2: welcome
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
def welcome():
    """Store the submitted credentials and return a greeting.

    Reads 'Username' and 'password1' from the posted form and appends them
    to the 'alf.db' PyDbLite database.
    NOTE(review): the password is stored in plain text.
    """
    accounts = Base('alf.db')
    # mode="open" reuses alf.db when it already exists instead of wiping it.
    accounts.create('name', 'pwd', mode="open")
    user = request.form['Username']
    passwd = request.form['password1']
    accounts.insert(name=user, pwd=passwd)
    accounts.commit()
    return 'welcome ' + user
示例3: resetHisDB
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
def resetHisDB():
    """Recreate the shared history and current stock databases.

    Both share the schema (sid, Edate, length); 'Edate' holds the started
    day, not the end day.
    """
    for suffix in ('.history', '.current'):
        store = Base("F://alfStock//" + "alf123" + suffix)
        store.create('sid', 'Edate', 'length')
        store.open()
        store.commit()
示例4: KopeteLog
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class KopeteLog():
    """Imports Kopete XML chat logs into a PyDbLite database.

    Walks the log directory, parses every *.xml log and inserts one record
    per message: (protocol, date, time, msgfrom, msgto, sender, inbound,
    nick, message).
    """
    def __init__(self, directory=None):
        # Default to the standard KDE Kopete log directory.
        if not directory:
            directory=os.path.join(os.path.expanduser("~"),
                                   ".kde/share/apps/kopete/logs")
        self.messages = Base('kopete.db') # Database stuff - Initializing...
        self.messages.create('protocol', 'date', 'time', 'msgfrom', 'msgto', 'sender', 'inbound', 'nick', 'message', mode='override')
        # FIX: loop variable renamed from `file` (shadowed the builtin).
        for logfile in self.searchLogs(directory):
            self.feedDatabase(logfile)

    def searchLogs(self, dir):
        """Return the paths of all .xml log files under *dir* (recursive)."""
        logfiles = []
        for (basepath, dirnames, filenames) in os.walk(dir):
            for child in filenames:
                if child.endswith (".xml"):
                    logfiles.append(os.path.join(basepath, child))
        return logfiles

    def feedDatabase(self, filepath):
        """Parse one Kopete XML log and insert its messages into the DB."""
        # The protocol is inferred from the log file's path.
        if 'WlmProtocol' in filepath:
            protocol = 'wlm'
        elif 'ICQProtocol' in filepath:
            protocol = 'icq'
        elif 'JabberProtocol' in filepath:
            protocol = 'jabber'
        else:
            protocol = 'unknown'
        xmllog = parse(filepath)
        # NOTE(review): month/year and contactfrom/contactto are only bound
        # when <head> contains the expected <date>/<contact> elements; a
        # malformed log would raise NameError below -- confirm inputs.
        for head in xmllog.getiterator('head'):
            for date in head.getiterator('date'):
                month=date.attrib['month']
                year=date.attrib['year']
            for contact in head.getiterator('contact'):
                # FIX: dict.has_key() is deprecated (removed in Python 3);
                # the `in` operator is equivalent on both 2.x and 3.x.
                if 'type' in contact.attrib:
                    if contact.attrib['type'] == 'myself':
                        contactfrom = contact.attrib['contactId']
                    else:
                        contactto = contact.attrib['contactId']
        for msg in xmllog.getiterator('msg'):
            nick = msg.attrib['nick']
            time = msg.attrib['time']
            inbound = msg.attrib['in']
            message = msg.text
            sender = msg.attrib['from']
            date = datetime.strptime("%s;%s;%s" %
                                     (year, month, msg.attrib['time']) ,
                                     "%Y;%m;%d %H:%M:%S")
            self.messages.insert(
                protocol=protocol,
                date=date.strftime("%Y%m%d"),
                time=date.strftime("%H:%M:%S"),
                msgfrom=contactfrom, msgto=contactto,
                sender=sender, inbound=inbound, nick=nick,
                message=message)
示例5: getStock
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
def getStock(self, sid):
    """Return the PyDbLite database for stock *sid*, creating it on first use."""
    sname = self.dbname(sid)
    db = Base(sname)
    if os.path.isfile(sname):
        # Database file already on disk: just load it.
        db.open()
    else:
        # First access: define the schema, then open the fresh file.
        db.create('date', 'buySig', 'sellSig', 'state', 'buy', 'sell', mode="override")
        db.open()
    return db
示例6: Queue
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class Queue(object):
    """ Simple queue with PyDbLite backend.

    Records have fields 'id' (insertion position) and 'item'. Despite the
    name, pop() removes the record with the highest id (LIFO order).
    """
    def __init__(self, queue_type=0, config=None):
        """Configure and attach the backing database.

        NOTE(review): when *config* is given the instance is left
        unconfigured (no db_filename/autocommit/mode) and Base() below will
        fail -- preserved from the original, flagged for follow-up.
        """
        if config is not None:
            # copy config
            pass
        else:
            # BUG FIX: this flag used to be stored as `self.commit`, which
            # shadowed the commit() method and made it uncallable.
            self.autocommit = False
            self.db_filename = "/tmp/queue.pydb"
            self.mode = "override"
        self.queue = Base(self.db_filename)

    def create_queue(self):
        """Create/overwrite the (id, item) table and index 'id'."""
        self.queue.create('id', 'item', mode=self.mode)
        self.queue.create_index('id')
        return None

    def push(self, item):
        """Append *item* with id equal to the current length."""
        self.queue.insert(self.length(), item)
        return None

    def pop(self):
        """Remove and return the most recently pushed record, or None if empty."""
        if self.is_empty():
            return None
        last_id = self.length() - 1
        record = self.queue._id[last_id]
        self.queue.delete(record)
        return record

    def list(self):
        """All records currently stored."""
        return self.queue.records

    def length(self):
        """Number of queued records."""
        return len(self.queue)

    def is_empty(self):
        """True when no records are queued."""
        return self.length() == 0

    def commit(self):
        """Persist the queue to disk when autocommit is enabled."""
        if self.autocommit is True:
            self.queue.commit()
        return None
示例7: makeDB
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
def makeDB(read, write, startTime = "2010-01-01 00:00:00", \
           endTime = "2010-01-01 00:10:00"):
    """Build a PyDbLite index of sensor event timestamps.

    Reads sensor<N>.txt files from the *read* directory, keeps timestamps
    inside [startTime, endTime], and inserts one row per new calendar day
    per sensor into the *write* database: (sensor, date, weekday, index),
    where index points into that sensor's in-memory event list.
    """
    db = Base(write)
    # Convert the time window to the numeric form used for comparisons.
    startTime = calc.datetonumber(startTime)
    endTime = calc.datetonumber(endTime)
    #Day comes from day of the week. It is a number from 0 to 6.
    #0 = Monday 6 = Sunday.
    db.create('sensor', 'date', 'weekday', 'index', mode="override")
    db.open()
    allData = {}
    for i in range(len(bbdata.allSensors)):
        s = bbdata.allSensors[i]
        data = []
        print "Parsing sensor " + str(s)
        try:
            sString = read + "sensor" + str(s) + ".txt"
            f = open(sString).readlines()
            oldD = None
            for timeLine in f:
                tmp = timeLine.split()
                # Timestamp is in columns 1 and 2 (date, time).
                tmp = tmp[1] + " " + tmp[2]
                #tmp = tmp[0] + " " + tmp[1]
                d = datetime.datetime.strptime(tmp, "%Y-%m-%d %H:%M:%S")
                foo = calc.datetonumber(d)
                if foo >= startTime and foo <= endTime:
                    data.append(calc.datetonumber(d))
                    # First event of a new day: record it in the database.
                    if d.toordinal() != oldD:
                        #Add to database
                        db.insert(s, d.toordinal(), d.weekday(), len(data) - 1)
                        oldD = d.toordinal()
                        print "  " + str(d)
        except Exception, e:
            # NOTE(review): swallows any error (missing/short file) and
            # moves on to the next sensor -- best-effort by design?
            print "Except:" + str(e)
            pass
        allData[s] = data
示例8: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class ArticleDB:
    """class for persistent storage of articles.
    what is stored from each Article object is defined in Article.TO_SAVE
    """
    def __init__(self, dbfile, mode = 'open', autocommit = False):
        # Backing PyDbLite database; fields come from Article.TO_SAVE and
        # the indexed fields from the module-level INDEX_ON.
        self.db = Base(dbfile)
        self.db.create(*Article.TO_SAVE, **{'mode': mode})
        self.db.create_index(*INDEX_ON)
        self.autocommit = autocommit
    def insertArticle(self, art):
        """insert article into database, with id consistency check"""
        present = []
        # Collect records already stored under the same internal id.
        if art.id_int != None:
            present.extend(self.db._id_int[art.id_int])
        # if art.idZBL != None:
        # present.extend(self.db._idZBL[art.idZBL])
        # if art.idMR != None:
        # present.extend(self.db._idMR[art.idMR])
        ids = list(set([rec['__id__'] for rec in present])) # unique ids
        present = [self.db[id] for id in ids] # remove duplicate identical entries (coming from matches on more than one id on the same article)
        new = art
        for old in present: # FIXME HACK turns off consistency checking
            try:
                new.mergeWith(Article.Article(record = old)) # article already present in database -- check if ids are consistent, update it with new info from art
            except Exception, e:
                # logging.error('inconsistent database contents (%i overlapping records); leaving database unchanged' % (len(present)))
                #logging.info('inconsistency between \n%s\n%s' % (new, Article.Article(old)))
                # Inconsistencies are only logged, never fatal (see FIXME).
                logging.warning('inconsistency between %s and %s' % (new, Article.Article(old)))
                # return False
        if len(present) == 0:
            # logging.debug('inserting a new article')
            pass
        else:
            # logging.debug('replacing %i old (consistent) record(s) for %s' % (len(present), new))
            pass
        # Replace the old record(s) with the merged article.
        self.db.delete(present)
        id = self.db.insert(**new.__dict__)
        if self.autocommit:
            # NOTE(review): self.commit is not defined in the visible part
            # of this class -- presumably defined elsewhere; confirm.
            self.commit()
        return True
示例9: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class doAnalysis:
#What do we need in init now? Ah, the analysis cache DB
def __init__(self):
    """Open (or create) the analysis-results cache and build the helpers."""
    # Persistent cache of per-event analysis results.
    self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
    # Open the database when present; otherwise create it from scratch.
    if self.AnalysisResults.exists():
        self.AnalysisResults.open()
    else:
        self.genPDL()
    self.PassportOffice = EventPassport.EventPassportOffice()
    self.LoadWaveform = WaveformLoader.LoadWave()
    self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()
    self.SignalManip = SignalManip.SignalManip()
#If DB doesnt exist, make it!
def genPDL(self):
    """Create the analysis-results .pdl database (fields: EventID, PVar)."""
    self.AnalysisResults.create('EventID', 'PVar', mode="open")
#Gen PVAr of the Signals!
def genPVAR(self):
'''
Filter Params.
doFilter -> Filter on or OFF
lowfreq_HP -> Low frequency High Pass
highFreq_LP -> High Frequency low pass
Set both for a band pass filter.
Filter Types:
ApplyFiltersWall -> Boxcar window
ApplyFiltersFIR -> Kaiser Window
'''
doFilter = True
lowFreq_HP = 3000
highFreq_LP = None
####Neutron Data#####
#get the list of events
PVar_Neutron_List = []
EventList = self.PassportOffice.CheckPassport_Runtype("Neutron")
#For every Event
for Event in EventList:
#Load Raw data
raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
#Apply filter. See the docstring
#for options
if doFilter:
filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
else:
filtered_data = raw_data[0]
#Calculate PVAR
PVar = self.AcousticAnalysis.calculatePVar(filtered_data)
#PVAr > 25 were observed for events from the wall from 1 specific run!
#We dont know what to do with those yet.
#if PVar<20:
PVar_Neutron_List.append(PVar)
##########Plotting#########
hist_bins = numpy.arange(10,13.0,0.1)
#hist_bins=20
plt.hist(PVar_Neutron_List, bins=hist_bins, normed=True, facecolor='green', alpha=0.75)
plt.grid(True)
plt.xlabel("PVar")
plt.ylabel("Count")
plt.title("PVar of Entire Dataset")
#### ALPHA DATA ####
PVar_Alpha_List = []
EventList = self.PassportOffice.CheckPassport_Runtype("Alpha")
for Event in EventList:
#get raw data
raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
#Apply filter. See the docstring
#for options
if doFilter:
filtered_data = self.AcousticAnalysis.ApplyFiltersWall(raw_data[0], lowFreq=lowFreq_HP, highFreq=highFreq_LP)
else:
filtered_data = raw_data[0]
PVar = self.AcousticAnalysis.calculatePVar(filtered_data)
PVar_Alpha_List.append(PVar)
#.........这里部分代码省略.........
示例10: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class PressureVeto:
    """Pressure-based bubble veto for one run.

    Loads (or builds from <RunNumber>.csv) a PyDbLite database of
    (id, temp, pressure, time) samples and finds bubble events as local
    pressure maxima above 30 PSI. PyDbLite is used for native-Python
    compatibility; deliberately not backed by MySQL so the data cannot be
    wiped by mistake.
    """
    def __init__(self, RunNumber):
        # Run number is kept as a string for filename construction.
        self.RunNumber = str(RunNumber)
        # Per-run pressure database.
        self.PyDB = Base('pressures/'+self.RunNumber+'.dbl')
        # Open the DB when it exists, otherwise build it from the CSV dump.
        if not self.PyDB.exists():
            self.genPDL()
        else:
            self.PyDB.open()
        # Minimum spacing between distinct bubbles. Samples are 1/10 s
        # apart, so the spacing is expressed as an integer iteration count.
        minSecondsBetweenBubbles = 4
        self.minIterBetweenBubbles = int(minSecondsBetweenBubbles*10)

    def genPDL(self):
        """Build the per-run database from the <RunNumber>.csv dump."""
        self.PyDB.create('id','temp','pressure','time', mode = "override")
        # Import csv only when actually building the database.
        import csv
        fname_csv = self.RunNumber+".csv"
        PTcsv = csv.reader(open(fname_csv))
        # Convert the CSV to PyDB line by line.
        for line in PTcsv:
            self.PyDB.insert(id = int(line[0]),temp=float(line[1]), pressure=float(line[2]), time=datetime.strptime(line[3], "%Y-%m-%d %H:%M:%S"))
        self.PyDB.commit()
        print("Creating PyDB complete.")

    def findBubbleTimings(self):
        '''Finds the bubble timings
        In -> Pressure data
        Out -> [[pressure, time, temp], ...] of local maxima
        Assumptions -> Bubble PSI > 30 PSI
        '''
        recs = [r for r in self.PyDB]
        RecIter = itertools.islice(recs, None)
        # Valid bubbles found so far, the candidate peak under
        # investigation, and the last record id (to stop the iterator).
        ValidBubbles = []
        _peak = []
        RecLastID = recs[-1]['__id__']
        for record in RecIter:
            if record['pressure'] >= 30.0:
                _peak = [record['pressure'], record['time'], record['temp']]
                # Never iterate past the last record.
                stepsTillLastRec = RecLastID - record['__id__']
                stepsIter = self.minIterBetweenBubbles if (stepsTillLastRec > self.minIterBetweenBubbles) else stepsTillLastRec
                # Scan the next minIterBetweenBubbles samples for the maximum.
                for i in range(stepsIter):
                    record = next(RecIter)
                    # BUG FIX: the original compared the pressure against the
                    # whole stored *list* (always False on Python 2, TypeError
                    # on Python 3), so the maximum was never updated. Compare
                    # against the stored peak pressure instead.
                    if record['pressure'] >= _peak[0]:
                        _peak = [record['pressure'], record['time'], record['temp']]
                # Local maximum found: record it and keep searching.
                ValidBubbles.append(_peak)
                _peak = []
        # Return the time cut.
        return ValidBubbles
示例11: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class SignalManip:
#usual stuff in init
def __init__(self):
    """Open (or create) the shared analysis-results cache and helpers."""
    # Shared persistent cache of per-event analysis results.
    self.AnalysisResults = Base('AnalysisResults/AnalysisResults.pdl')
    # Open the database when present; otherwise create it from scratch.
    if self.AnalysisResults.exists():
        self.AnalysisResults.open()
    else:
        self.genPDL()
    self.PassportOffice = EventPassport.EventPassportOffice()
    self.LoadWaveform = WaveformLoader.LoadWave()
    self.AcousticAnalysis = AnalysisTools.AcousticsAnalysis()
#If DB doesnt exist, make it!
def genPDL(self):
    """Create the analysis-results .pdl database (fields: EventID, PVar)."""
    self.AnalysisResults.create('EventID', 'PVar', mode="open")
#Function to generate signal average
def genSignalAverage(self, EventType = "Neutron"):
    """Average the raw waveforms of every event of type *EventType*.

    Writes the 50000-sample mean waveform to
    AnalysisResults/signalAvg.<EventType>.binary and returns it.
    """
    EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
    average = numpy.zeros((50000))
    for Event in EventList:
        # Drop the last 3 characters of the stored path (presumably a file
        # extension) before handing it to the loader.
        average += self.LoadWaveform.LoadData(Event['Path'][:-3])[0]
    average /= len(EventList)
    # Persist the averaged waveform as a raw binary dump.
    with open("AnalysisResults/signalAvg." + EventType + ".binary", "wb") as out:
        average.tofile(out, format="%f")
    return average
#function to generate FFT avergae
def genFFTAverage(self, EventType="Neutron", doWin = False, winStart=10000, winEnd=30000, Fs = 1250000.0):
#get all Events of type EventType
EventList = []
EventList = self.PassportOffice.CheckPassport_Runtype(EventType)
FFTAvgMem = numpy.zeros((50000))
FFTAvgBins = numpy.fft.fftfreq(len(FFTAvgMem), 1.0/Fs)
for Event in EventList:
#Load Raw data
raw_data = self.LoadWaveform.LoadData(Event['Path'][:-3])
####SignalWindow####
if doWin:
print "is it"
TempSigMem = numpy.zeros((50000))
TempSigMem[winStart:winEnd] = raw_data[0][winStart:winEnd]
R_data = TempSigMem
else:
R_data = raw_data[0]
#
FFTs = numpy.fft.fft(R_data)
#for i in range(5000,6000):
#pwrspec = abs(numpy.mean(FFTs[5000:6000]))
#if pwrspec>10:
# print pwrspec, Event
FFTAvgMem += FFTs
FFTAvgMem /= len(EventList)
####Storage#####
#FFT#
Storage = open("AnalysisResults/FFTAvg."+EventType+"win"+str(doWin)+".binary", "wb")
FFTAvgMem.tofile(Storage, format="%f")
Storage.close()
#FFT FREQS#
Storage = open("AnalysisResults/FFTAvgBins."+EventType+"win"+str(doWin)+".binary", "wb")
FFTAvgBins.tofile(Storage, format="%f")
#.........这里部分代码省略.........
示例12: __init__
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class EventPassportOffice:
    """Registry ("passport office") of matched acoustic/pressure events.

    Backed by a PyDbLite database with one record per event:
    (EventID, Temperature, Pressure, Time, RunNumber, Path, RunType).
    RunNumber is the *acoustic* run number; RunType is e.g. 'Neutron' or
    'Alpha'. (Marking acoustic and pressure run IDs separately was the
    operator's choice.)
    """
    def __init__(self):
        self.EventPassport = Base('EventPassport/EventPassport.pdl')
        # Open the database when present; otherwise create the schema.
        if not self.EventPassport.exists():
            self.genPDL()
        else:
            self.EventPassport.open()
        self.CleanEvents = CleanEvents.CleanData()

    def genPDL(self):
        """Create the passport database schema."""
        self.EventPassport.create('EventID','Temperature','Pressure','Time', 'RunNumber','Path', 'RunType', mode = "open")

    def genPassport(self, Path, RunNumberAcoustic, RunNumberPressure, RunType_WS):
        """Register all matched events for a run, skipping +/-2 s duplicates."""
        FilteredData = self.CleanEvents.MatchEvent_PressurePiezo(Path, str(RunNumberAcoustic), str(RunNumberPressure))
        # Continue numbering after the last EventID of this run, or start
        # at <run>0001 for a brand-new run.
        recs = [ Record['EventID'] for Record in self.EventPassport if Record['RunNumber'] == RunNumberAcoustic]
        if len(recs) == 0:
            EID = str(RunNumberAcoustic)+"0001"
            EID = int(EID)
        else:
            EID = max(recs)+1
        for DataPoint in FilteredData:
            timestamp = DataPoint[1]
            # Duplicate check: any existing event within +/-2 seconds.
            x = [Event for Event in self.EventPassport if Event['Time']-timedelta(seconds=2)<=timestamp<=Event['Time']+timedelta(seconds=2)]
            if len(x) == 0:
                self.EventPassport.insert(EventID = EID ,Temperature = DataPoint[3],Pressure = DataPoint[2],Time = DataPoint[1], RunNumber = RunNumberAcoustic, Path = DataPoint[0], RunType = RunType_WS)
                EID += 1
                print("Inserting Entry ...")
            else:
                print("Duplicate entry found at: "+str(DataPoint[1])+" Event ID: "+str(x[0]['EventID']))
        self.EventPassport.commit()

    def CheckPassport_RunNumber(self, RunNumberQry):
        """Return the events of a given acoustic run number."""
        return self.EventPassport(RunNumber = RunNumberQry)

    def CheckPassport_Temperature(self, HighTemp, LowTemp):
        """Return events with LowTemp < Temperature < HighTemp.

        BUG FIX: the original evaluated ``HighTemp>Temperature>LowTemp``
        with an undefined name ``Temperature`` (NameError); filter records
        explicitly instead, mirroring CheckPassport_Time.
        """
        return [r for r in self.EventPassport if LowTemp < r['Temperature'] < HighTemp]

    def CheckPassport_Time(self, fromTime, toTime):
        """Return events with fromTime < Time < toTime."""
        recs = [ r for r in self.EventPassport if fromTime < r['Time'] < toTime]
        return recs

    def SizeofPassportDB(self):
        """Number of registered events."""
        return len(self.EventPassport)

    def CheckPassport_Runtype(self, runtype_WS):
        """Return the events of a given run type."""
        return self.EventPassport(RunType = runtype_WS)

    def CheckPassport_eventID(self, EventID_WS):
        """Return the events with the given EventID."""
        return self.EventPassport(EventID = EventID_WS)

    def _deleteEvent(self, RecID_WS):
        """Delete one record by its PyDbLite record id and persist."""
        del self.EventPassport[RecID_WS]
        self.EventPassport.commit()
示例13: Organizer
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class Organizer(Cacheable):
"""
This is the base class for organizers
"""
def __init__(self, cache, recursive=True):
    """Base initializer for organizers.

    cache     -- the underlying file cache being organized
    recursive -- when True, intermediate directories are registered too
    """
    Cacheable.__init__(self)
    self.cache = cache
    self.recursive = recursive
    # Lazily created in reset(): table mapping real paths to transformed paths.
    self.transformed = None
    # Do not call reset here, it is called from fs.py when the fs is
    # already started
def reset(self):
    """(Re)initialize the transformed-paths table, then reset the cache."""
    # NOTE(review): assuming the table is created only on the first call;
    # the original source's indentation was ambiguous here -- confirm.
    if not self.transformed:
        self.transformed = Base(DB_TRANSFORMED)
        self.transformed.create('realpath', 'path', 'dirname', mode='override')
        self.transformed.create_index('realpath')
        self.transformed.create_index('path')
        self.transformed.create_index('dirname')
    self.cache.reset()
    Cacheable.reset(self)
def updatecache(self):
    """Rebuild the table by generating paths for every cached file."""
    self.generateallpaths()
def deletefromcache(self, path):
    """Drop *path* from the cache and delete its transformed records."""
    realpath = self.realpath(path)
    logger.debug("deletefromcache(%s)" % realpath)
    self.cache.deletefromcache(realpath)
    # Remove every transformed-path record pointing at this real path.
    for record in self.transformed.get_index('realpath')[realpath]:
        self.transformed.delete(record)
def addtocache(self, path):
    """Register *path* in the cache unless it is already known."""
    # Already registered: nothing to do.
    if self.transformed.get_index('path')[path]:
        return
    realpath = self.realpath(path)
    self.cache.addtocache(realpath)
    self.addfile(realpath)
############################################
# Overwritable functions
def dirlist(self, path): #IGNORE:W0613
    """Return the virtual (generated, non-existent) directories for *path*.

    Base implementation: organizers expose no virtual directories.
    """
    return []
def generatepaths(self, realpath):
    """Yield the transformed path(s) for *realpath*.

    A file can map to more than one transformed path. The default maps it
    to its path relative to the filter root, with a trailing slash added.
    """
    relative = util.removeroot(realpath, self.cache.filter.root)
    yield util.addtrailingslash(relative)
def generaterealpath(self, path):
    """Build the real path corresponding to a virtual *path*."""
    # path[1:] drops the leading '/' so join keeps the root prefix.
    return os.path.join(self.cache.filter.root, path[1:])
############################################
# General functions
def generateallpaths(self):
    """Register every file reported by the cache in self.transformed.

    In recursive mode each intermediate directory is registered before the
    file itself.
    """
    root = self.cache.filter.root
    for realpath in self.cache.filelist():
        if not self.recursive:
            self.addfile(realpath)
            continue
        # Walk down from the root, registering each ancestor in turn.
        currentpath = root
        for pathpart in util.pathparts(util.removeroot(realpath, root)):
            currentpath = os.path.join(currentpath, pathpart)
            self.addfile(currentpath)
def addfile(self, realpath):
"""
Stores a file in self.transformed if not there already and returns the
paths for that file in the proxy file system
"""
logger.debug('addfile(%s)' % realpath)
if not util.ignoretag(util.removeroot(realpath,
self.cache.filter.root)):
return []
self.refreshcache()
transformed = self.transformed.get_index('realpath')[realpath]
if transformed:
return (record['path'] for record in transformed)
else:
paths = []
#.........这里部分代码省略.........
示例14: TagOrganizer
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
class TagOrganizer(Organizer):
    """Organizer that files entries under virtual per-tag directories.

    Tags live in a PyDbLite table (realpath, category, tag); subclasses
    implement generatetags() to derive tags for each file.
    """
    def __init__(self, cache, category=None):
        self.tags = None
        self.category = category
        # Tag organizers are flat: no recursive directory registration.
        Organizer.__init__(self, cache, False)

    def reset(self):
        """(Re)initialize the tag table, then reset the base organizer."""
        # NOTE(review): assuming the table is created only on the first
        # call; the original source's indentation was ambiguous -- confirm.
        if not self.tags:
            self.tags = Base(DB_FILE_TAGS)
            self.tags.create('realpath', 'category', 'tag', mode = 'override')
            self.tags.create_index('realpath')
            self.tags.create_index('category')
        Organizer.reset(self)

    def updatecache(self):
        """Regenerate all tags, then refresh the base organizer cache."""
        self._generatetags()
        Organizer.updatecache(self)

    def _deletefromcache(self, path):
        """Delete every tag record attached to *path*."""
        realpath = self.realpath(path)
        logger.debug("_deletefromcache(%s)" % realpath)
        for tag in self.tags.get_index('realpath')[realpath]:
            self.tags.delete(tag)

    def deletefromcache(self, path):
        self._deletefromcache(path)
        Organizer.deletefromcache(self, path)

    def addtocache(self, path):
        # Re-tag from scratch so stale tags do not accumulate.
        self._deletefromcache(path)
        self.generatetags(self.realpath(path))
        Organizer.addtocache(self, path)

    def generatepaths(self, realpath):
        """Yield /<tag>/<basename> for every tag attached to *realpath*."""
        for record in self.tags.get_index('realpath')[realpath]:
            yield os.path.join(os.sep, record['tag'],
                               os.path.basename(realpath))

    def dirlist(self, path):
        """Only the root exposes directories: one per tag in the category."""
        if path == '/':
            return self.taglist(self.category)
        else:
            return []

    ############################################
    # Tag functions
    def _generatetags(self):
        # util.ignoretag filters out special/ignored names.
        for filename in filter(util.ignoretag, #IGNORE:W0141
                               self.cache.filelist()):
            self.generatetags(filename)

    def generatetags(self, filename):
        """Hook for subclasses: derive and store tags for *filename*."""
        pass

    def tag(self, realpath, category, tag):
        """Store one (realpath, category, tag) record; skip empty tags."""
        logger.debug('tag(%s, %s, %s)' % (realpath, category, tag))
        # FIX: idiomatic identity/inequality tests instead of the original
        # `not tag == None and not tag == ''`.
        if tag is not None and tag != '':
            self.tags.insert(realpath, category, tag)

    def filelistbytags(self, category, tags):
        """Yield basenames of files carrying any of *tags* in *category*."""
        self.refreshcache()
        for record in self.tags.get_index('category')[category]:
            if record['tag'] in tags:
                yield os.path.basename(record['realpath'])

    def taglist(self, category):
        """Return the unique tags recorded under *category*."""
        self.refreshcache()
        return util.unique([record['tag'] for record in
                            self.tags.get_index('category')[category]])
示例15: Base
# 需要导入模块: from PyDbLite import Base [as 别名]
# 或者: from PyDbLite.Base import create [as 别名]
from selenium import webdriver
from selenium.webdriver.support.ui import Select
from PyDbLite import Base
from facepy import *
import time
import winsound
import getpass
# Grade-watcher setup: local grades database, Facebook Graph client, and
# the portal credentials prompted from the user.
db = Base('grades.db')
# mode='open' reuses grades.db if it already exists.
db.create('grade', 'course', mode='open')
# NOTE(review): placeholder token -- a real Graph API key must be supplied.
graph = GraphAPI("my_facebook_api_key")
username = raw_input("Username: ")
# getpass hides the password while typing.
password = getpass.getpass("Password: ")
while True:
# Loading PhantomJS on UNIX
driver = webdriver.PhantomJS()
# Loading PhantomJS on Windows
#driver = webdriver.PhantomJS('C:\phantomjs-1.9.7-windows\phantomjs.exe')
try:
driver.get("http://ent.unr-runn.fr/uPortal/")
select = Select(driver.find_element_by_name('user_idp'))
select.select_by_visible_text('ENSICAEN')
driver.find_element_by_id('IdPList').submit()
driver.find_element_by_id('username').send_keys(username)
driver.find_element_by_id('password').send_keys(password)