

Python Database.update Method Code Examples

This article collects typical usage examples of the Python method CodernityDB.database.Database.update. If you are unsure what Database.update does, how to call it, or what real-world usage looks like, the curated code examples below should help. You can also explore further usage examples of the containing class, CodernityDB.database.Database.


The following presents 8 code examples of Database.update, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Python code examples.
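Before the full examples, here is a minimal sketch of the typical update workflow (not taken from any of the projects below): fetch a document that carries its current _id and _rev, modify it, and pass the whole document back to Database.update. The database path is an arbitrary placeholder, and the snippet assumes CodernityDB is installed.

from CodernityDB.database import Database

db = Database('/tmp/update_sketch')  # placeholder path
db.create()

# insert() stores the document and returns its generated '_id' and '_rev'
stored = db.insert(dict(x=1))

# Fetch the full document by id, modify it, and write it back.
# update() expects the document to carry its current '_id' and '_rev'.
doc = db.get('id', stored['_id'])
doc['x'] = 2
db.update(doc)

print(db.get('id', stored['_id']))  # the stored document now has x == 2
db.close()

Unlike insert, update keeps the document's identity and bumps its revision, which is why the examples below always pass the complete document (including _id and _rev) back to the database rather than a partial patch.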

Example 1: main

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
def main():
    db = Database('/tmp/tut_update')
    db.create()
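    # WithXIndex (not shown in this snippet) is a custom hash index on the 'x' field defined earlier in quick_update.py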
    x_ind = WithXIndex(db.path, 'x')
    db.add_index(x_ind)

    # for a full example we first have to add the data
    # (the same insert code as in the previous step)

    for x in xrange(100):
        db.insert(dict(x=x))

    for y in xrange(100):
        db.insert(dict(y=y))

    # end of insert part

    print db.count(db.all, 'x')

    for curr in db.all('x', with_doc=True):
        doc = curr['doc']
        if curr['key'] % 7 == 0:
            db.delete(doc)
        elif curr['key'] % 5 == 0:
            doc['updated'] = True
            db.update(doc)

    print db.count(db.all, 'x')

    for curr in db.all('x', with_doc=True):
        print curr
Developer: abhishekgahlot, Project: codernitydb, Lines of code: 33, Source file: quick_update.py

Example 2: Store

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]

#......... part of the code omitted here .........
        backups = os.listdir(backups_path)
        if "latest" in backups:
            backups.remove("latest")
        return backups

    def is_journal_complete(self):
        journal_path = self.get_journal_path()
        if (os.path.exists(journal_path)):
            if (os.path.isfile(os.path.join(journal_path, "journal_complete"))):
                return True
            elif (os.path.isfile(os.path.join(journal_path, "journal_incomplete"))):
                print("Clearing Journal")
                self.remove_incomplete_journal()
                os.remove(os.path.join(journal_path, "journal_incomplete"))
                self.rebuildDB()
                return False
        return False

    def remove_incomplete_journal(self):
        journal_path = self.get_journal_path()
        for file_object in os.listdir(os.path.join(journal_path, "objects")):
            os.remove(os.path.join(journal_path, "objects", file_object))
        for file_object in os.listdir(os.path.join(journal_path, "backups")):
            os.remove(os.path.join(journal_path, "backups", file_object))

    def write_to_journal(self, command):
        journal_path = self.get_journal_path()
        with open(os.path.join(journal_path, "journal_incomplete"), "a") as TF:
            TF.write(command + "\n")
            TF.close()

    def finish_journal(self):
        for key, value in self.objects_counter.iteritems():
                if value["operation"] == "update" and value["value"] == 0:
                    self.removeObject(key)
                else:
                    self.write_to_journal(value["operation"] + " " + key + " " + str(value["value"]))
        if os.path.exists(os.path.join(self.get_journal_path(), "journal_incomplete")):
            journal_file = open(os.path.join(self.get_journal_path(), "journal_incomplete"), "r+")
            uniqlines = set(journal_file.readlines())
            journal_file.close()
            journal_file = open(os.path.join(self.get_journal_path(), "journal_incomplete"), "w")
            journal_file.writelines(uniqlines)
            journal_file.close()
            self.file_rename(os.path.join(self.get_journal_path(), "journal_incomplete"), "journal_complete")

    def commit(self):
        print("Committing Journal")
        journal_path = self.get_journal_path()
        if (os.path.exists(self.get_latest_path())):
            os.remove(self.get_latest_path())
        if (self.is_journal_complete()):
            with open(os.path.join(journal_path, "journal_complete"), "rb") as TF:
                for command in TF:
                    words = command.split()
                    if (words[0] == "move"):
                        file_path, file_name = os.path.split(words[2])
                        if not os.path.exists(file_path):
                            os.mkdir(file_path)
                        shutil.move(words[1], words[2])
                        #os.rename(words[1], words[2])
                    elif (words[0] == "remove"):
                        os.remove(words[1])
                    elif (words[0] == "rmdir"):
                        shutil.rmtree(words[1])
                    elif (words[0] == "insert"):
Developer: PapajaLM, Project: Backuping, Lines of code: 70, Source file: store.py

Example 3: main

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
def main():
    db2 = pickledb.load('examlple.db', True)
    db2.set('test', 'test')

    db = Database('/home/papaja/Zaloha/target/store.db')
    db.open()
    # db.create()
    # print database
    # x_ind = WithHashIndex(db.path, 'hash')
    # pointer_ind = WithHashIndex(db.path, 'pointer')
    # db.add_index(x_ind)
    # db.add_index(pointer_ind)
    # db.insert({'hash':'3f8ee76c84d95c3f4ed061db98694be57e7d33da', 'pointer':1})
    # # for x in xrange(100):
    #     db.insert(dict(x='3f8ee76c84d95c3f4ed061db98694be57e7d33da'))
    # for curr in db.all('id'):
    #     curr['x'] = 1
    #     db.update(curr)
    #     print curr
    for curr in db.all('id'):
         print curr
    try:
        test = db.get('hash', '3f8ee76c84d95c3f4ed061db98694be57e7d33da', with_doc=True)
        print test
    except RecordNotFound:
        print "Nieje rekord"
    exit()
    test['doc']['pointer'] = test['doc']['pointer'] + 1
    db.update(test['doc'])
    for curr in db.all('id'):
         print curr
    exit()

    lstat = os.lstat("/home/papaja/.cache/keyring-SZ5Lrw/gpg")
    mode = lstat.st_mode
    if S_ISDIR(mode):
        print("dir")
    elif S_ISREG(mode):
        print("file")
    elif S_ISLNK(mode):
        print("link")
    else:
        print("None")
        print(mode)
        print(lstat)
        print(S_ISFIFO(mode))
    exit()
    #print(os.readlink('/home/papaja/Zaloha/target/objects/test'))
    #shutil.move("/home/papaja/Zaloha/target/journal/objects/a3fe40b52ec03a7e2d8c8c0ca86baaf0192038c5.meta", "/home/papaja/Zaloha/target/objects")
    #shutil.rmtree(os.path.join("/home/papaja/", "objects"))
    # myFile = MyFile('/home/papaja/third')
    # print(myFile.readline().decode("UTF-8"))
    # dst = open('/home/mint/Diplomovka/first', 'wb')
    # src = open('second', 'rb')
    # synced = open('/home/papaja/third', 'wb')
    # signatureFile = open('signature', 'wb')
    # deltaFile = open('/home/papaja/delta', 'rb');
    # hashes = pyrsync2.blockchecksums(dst)
    # hashes_save = {
    #     weak: (index, strong) for index, (weak, strong)
    #     in enumerate(hashes)
    # }
    # signature.write(bytes('gz\n', "UTF-8"))
    # pickle.dump(hashes_save, signature, pickle.HIGHEST_PROTOCOL)
    # type = signature.readline().decode("UTF-8")
    # print("Typ {}".format(type.strip()))
    # signature.readline()
    # hashes_save = pickle.load(signature)
    # print(hashes_save)
    # delta = pyrsync2.rsyncdelta(src, hashes_save)
    # pyrsync2.patchstream(dst, synced, delta)
    # io.FileIO
    # signature = librsync.signature(dst)
    # delta = librsync.delta(src, signature)
    # librsync.patch(dst, delta, synced)
    # synced.close()
    temp = tempfile.NamedTemporaryFile()
    skuska = open(temp.name, "wb")
    dst = open('/home/mint/Diplomovka/first', 'rb')
    velkost = open('/home/mint/Diplomovka/velkost', 'rb')
    retazec = 'ahoj'
    print(len(retazec))
    print(velkost.readline())
    print(velkost.read(3))
    #velkost.write(str(sys.getsizeof(retazec)))
    dst_data = dst.read(16)
    while dst_data:
        skuska.write(dst_data)
        dst_data = dst.read(16)
    skuska.close()
    patchProcess = subprocess.Popen(['rdiff', 'patch', temp.name, '/home/mint/Diplomovka/delta'], stdout=subprocess.PIPE)
    patchFile, patchError = patchProcess.communicate()
    # print patchFile
    # dst_data = dst.read(16)
    while dst_data:
        #patchProcess.stdin.write(dst_data)
        dst_data = dst.read(16)
    # # patchProcess.stdin.write(dst_data)
    #patchProcess.stdin.write(dst_data)
    #patchProcess.stdin.close()
#......... part of the code omitted here .........
Developer: PapajaLM, Project: Backuping, Lines of code: 103, Source file: test.py

Example 4: Database

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
if __name__ == '__main__':
    from CodernityDB.database import Database
    db = Database('/tmp/db_test')
    db.create()
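    # MultiIndex (not shown in this snippet) is a custom index defined earlier in multi_index.py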
    db.add_index(MultiIndex(db.path, 'multi'))
    for x in xrange(2):
        d = dict(l=range(10 * x, 10 * (x + 1)))
        db.insert(d)
    for curr in db.all('multi'):
        print curr

    for curr in db.all('id'):
        nl = map(lambda x: x * 10, curr['l'])
        curr['l'] = nl
        db.update(curr)

    for curr in db.all('multi'):
        print curr

    for curr in db.all('id'):
        nl = map(lambda x: x % 3, curr['l'])
        curr['l'] = nl
        print nl
        db.update(curr)

    for curr in db.all('multi'):
        print curr

    for curr in db.get_many('multi', key=1, limit=-1):
        print curr
Developer: PapajaLM, Project: Backuping, Lines of code: 32, Source file: multi_index.py

Example 5: __init__

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
class SecuIn:
	'''
	Handles all data input into the database

	'''
	def __init__(self,passkey):
		self.key = passkey


		self.initQuestions = SecuQ(self.key)

		self.DBConfig = AppConfig()
		self.dbName = self.DBConfig.mapget('databaseinfo')['databasename']

		self.db = Database(self.dbName)


		initDay = DayEntry(self.key) # checks day hash or creates a new one
		self.dayKey = initDay.dayKey



	def questionDataIN(self,data):
	
		'''
		Data IN:
		{'a' : 2, 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'}
	
		or
	
		{ 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'} 
		 some entries may be missing
	
	
	Data OUT: (NEVER DELETE ANYTHING :) )
	
		{'date' : xx , _id: ###date2### , 'a':{'xxdate3xx':2},
						'b':{'xxdate3xx':14},
						'c':{'xxdate3xx':11},
						'note':{'xxdate3xx':'you'}}
	
	
		{'date' : xx , _id: ###date1### , 'a':{'xxdate1xx':1,'xxdate2xx':2},
						'b':{'xxdate1xx':14,'xxdate2xx':14},
						'c':{'xxdate1xx':11,'xxdate2xx':11},
						'note':{'xxdate2xx':'hello','xxdate3xx':'you'}
	
	
		'''


		timeIN = getTimeStamp() # get the current timestamp
		#initialize new questions
		
	
		# get data as a doc {'date':'xx/xx/xxTxx:xx:xxxx','question1':'x','question2':'x'}, same as dict format

		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			dayrow = self.db.get('id', self.dayKey, with_doc=True)
	
			#this function assumes database already opened
			# this is gonna be a tuple that is inserted directly

	
			# convert data from javascript to a python dict/json
			# if (type(data) is str):
			dataIN=eval(data) #{ 'b': 14 , 'c': 11, 'd': 43, 'note' : 'hello'}
			datachanged = dataIN.keys()





			for question in datachanged:
				try:
					dayrow[question][timeIN] = dataIN[question]
				except KeyError: #first write to key, initiate
					dayrow[question] = {}
					dayrow[question][timeIN] = dataIN[question]

			

			self.db.update(dayrow) 
			self.db.close()
			self.initQuestions.questionsValidate(datachanged) # insert questions whose data has changed

			#if all ok!
			return True
Developer: fflowres, Project: Scripts, Lines of code: 92, Source file: DBInputClass.py

Example 6: __init__

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
class cache :
    """
        cache for word morphological analysis
    """
    DB_PATH = os.path.join(os.path.expanduser('~'), '.thaalabCache')
    def __init__(self, cache_path=False):
        """
        Create Analex Cache
        """
        # use this dictionary as a local cache;
        # the global db will be updated when the object is destroyed
        # get the database path
        if hasattr(sys, 'frozen'): # only when running in py2exe this exists
            base = sys.prefix
        else: # otherwise this is a regular python script
            base = os.path.dirname(os.path.realpath(__file__))
        if not cache_path:
            file_path = self.DB_PATH
        else:
            file_path = os.path.join(os.path.dirname(cache_path), '.thaalabCache')
        
        self.cache={};
        self.db = Database(file_path)
        if not self.db.exists():
            self.db.create();
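            # WithAIndex (not shown in this snippet) is presumably a custom hash index on the 'a' field defined or imported elsewhere in the project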
            x_ind = WithAIndex(self.db.path, 'a')
            self.db.add_index(x_ind)        
        else:
            self.db.open();

    def __del__(self):
        """
        Delete instance and clear cache
        
        """
        self.cache=None;
        self.db.close();

    def update(self):
        """update data base """
        #~ pass
        for word in self.cache:
            self.add_checked(word, self.cache[word])        

    def is_already_checked(self, word):
        try:
            return bool(self.db.get('a', word))
        except:
            return False
        #~ except: return False;

    def get_checked(self, word):
        try:
            x = self.db.get('a', word, with_doc=True)
            y = x.get('doc',False);
            if y: 
                return y.get('d',[])
            else: return []
        except:
            return []
    
    def add_checked(self, word, data):
        idata = {"a":word,'d':data}
        try:
            saved = self.db.get('a', word, with_doc=True)
        except:
            saved = False
        if saved:
            saved['doc']['d'] = data
            doc  = saved['doc']
            doc['update'] = True
            self.db.update(doc)
        else:
            self.db.insert(idata)

    
    def exists_cache_word(self, word):
        """ test if word exists in cache"""
        #if exists in cache dictionary
        if word in self.cache:
            return True
        else: # test in database
            if self.is_already_checked(word):
                stored_data = self.get_checked(word)
                self.cache[word] = stored_data
                return bool(self.cache[word])
            else:
                # add an empty dict for the word to avoid repeated database checks
                self.cache[word] = {}
                return {}            

    
    def get_relation_freq(self, word_prev, word_cur, relation):
        self.exists_cache_word(word_prev)
        return self.cache.get(word_prev, {}).get(word_cur, {}).get(relation, 0);
    
    def is_related(self, word_prev, word_cur):
        """ test if two words are related"""
        # search in the cache
        self.exists_cache_word(word_prev)
#......... part of the code omitted here .........
Developer: linuxscout, Project: mishkal, Lines of code: 103, Source file: cache.py

Example 7: CodernityDataStore

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]
class CodernityDataStore(object):
    PATH_TYPE = 'path'

    def __init__(self, db_path):
        self.db = Database(db_path)
        if self.db.exists():
            self.db.open()
        else:
            self.db.create()
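            # PathIndex and PathAddedIndex (not shown in this snippet) are custom indexes presumably defined elsewhere in datastore.py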
            path_index = PathIndex(self.db.path, 'path')
            self.db.add_index(path_index)
            path_added_index = PathAddedIndex(self.db.path, 'path_added')
            self.db.add_index(path_added_index)

    @classmethod
    def dt_str(cls, datetime):
        return datetime.isoformat()[0:19]

    def add_video(self, path, video, added=None):
        logger.debug("add_video(%s, %s, %s)", path, video, added)
        added = added or datetime.utcnow()

        existing = list(self.db.get_many('path', path, with_doc=True))

        video_data, video_type = Serializer.serialize_video(video)
        data = dict(_t=self.PATH_TYPE, path=path, video_data=video_data, video_type=video_type,
                    downloads=dict(), added=self.dt_str(added))
        self.db.insert(data)

        for existing_path in existing:
            self.db.delete(existing_path['doc'])

    def add_download(self, path, provider, sub_id, language, score):
        logger.debug("add_download(%s, %s, %s, %s, %d)", path, provider, sub_id, language, score)
        data = self.db.get('path', path, with_doc=True)
        path = data['doc']
        download = dict(provider=provider, sub_id=sub_id, lang=str(language), score=score)
        if str(language) in path['downloads']:
            path['downloads'][str(language)].append(download)
        else:
            path['downloads'][str(language)] = [download]
        self.db.update(path)

    def get_downloads_for_video(self, path):
        logger.debug("get_downloads_for_video(%s)", path)
        data = self.db.get('path', path, with_doc=True)
        return data['doc']['downloads']

    @staticmethod
    def exceeds_desired_score(video, score, desired_movie_score, desired_episode_score):
        if isinstance(video, Episode):
            return score >= desired_episode_score
        elif isinstance(video, Movie):
            return score >= desired_movie_score

    def get_incomplete_videos(self, languages, desired_movie_score, desired_episode_score, ignore_older_than):
        logger.debug("get_incomplete_videos(%s, %d, %d, %s)", languages, desired_movie_score, desired_episode_score, ignore_older_than)
        within_date = self.db.get_many('path_added', start=self.dt_str(ignore_older_than), with_doc=True)
        results = []
        for path in (data['doc'] for data in within_date):
            video = Serializer.deserialize_video(path['video_type'], path['video_data'])
            needs = []
            for lang in languages:
                if str(lang) in path['downloads']:
                    current_score = max(download['score'] for download in path['downloads'][str(lang)])
                    if not self.exceeds_desired_score(video, current_score, desired_movie_score, desired_episode_score):
                        needs.append(dict(lang=lang, current_score=current_score))
                else:
                    needs.append(dict(lang=lang, current_score=0))
            if needs:
                results.append(dict(path=path['path'], video=video, needs=needs))

        logger.debug("found %d incomplete videos: %s", len(results), results)
        return results

    def close(self):
        self.db.close()
Developer: NigelRook, Project: superliminal, Lines of code: 79, Source file: datastore.py

Example 8: questionGet

# Required import: from CodernityDB.database import Database [as alias]
# Or alternatively: from CodernityDB.database.Database import update [as alias]

#......... part of the code omitted here .........
						self.aggregate[question] = oQ[question]
				except KeyError:
					pass
	
				try:
					
					if oQ[question]['multipoint'] == 'True':
						self.multipoint[question] = oQ[question]
				except KeyError:
					pass

			self.db.close()

	
			return True 

			'''
			Qinfo=
			{
			'a':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':True},
			
			'b':{'active':'True','typ':'slider','range':'0-100','aggregate':True, 'multipoint':False},
			
			'c':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':True},
			
			'd':{'active':'True','typ':'slider','range':'0-100','aggregate':False, 'multipoint':False},
			
			'note':{'active':'True','typ':'note', 'multipoint':"False"}
			}
			'''


	def questionInsert(self,data,descriptor='inclusive'):# this will be a class later for ... infinite data

	
	
	
		
		if(self.db.exists()):
			self.db.open()
			self.db.id_ind.enc_key = self.key
			#select Qindex
			Qindex = self.db.get('id', self.indexdb.Qindex, with_doc=True)
	
			#must copy in this way for dictionaries or else all references are affected
			oQ= Qindex.copy()
			# delete keys that are unnecessary to modify
			# if you use del <key> it deletes the key in every variable sharing the same dict reference
			oQ.pop('_rev', None)
			oQ.pop('_id', None)
			oQ.pop('t', None)
			oQ.pop('questions', None)
	
			# if (type(data) is str):
			nQL=eval(str(data))
	
	

	
			if (descriptor == "exclusive"):
			 # exclusive: new data always overwrites old data and deletes any data that is not new
			 # remove old keys from the row
				for key in oQ.keys(): #removes keys not in entry and overwrites everything
					if key not in nQL.keys():
						Qindex.pop(key,None)

	
			if (descriptor == "inclusive"): 
				# only overwrites data, keeps old data that is unaffected
				pass
	
	
			#oQ.update(nQL) # update existing keys to be written
	
			
			Qindex.update(nQL) #updates existing keys in row
	
			self.db.update(Qindex) #updates NoSQL
			self.db.close()
			self.questionGet()
			return True
		else:
			print ('CANNOT LOAD self.db')
			return False
	
	
	
	def questionsValidate(self,data): # marks all used questions as active; takes a list of questions

		for question in data:
			if question in self.unInit.keys():
				updated = {}
				updated[question] = self.unInit[question]
				updated[question]['active'] = "True"
				self.questionInsert(str(updated),"inclusive")

		#update class variables
		self.questionGet()
	
		return True
Developer: fflowres, Project: Scripts, Lines of code: 104, Source file: DBQuestionClass.py


Note: The CodernityDB.database.Database.update method examples in this article were compiled by 纯净天空 from open source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open source projects contributed by various developers; copyright of the source code belongs to the original authors. For distribution and use, please refer to the corresponding project's license. Do not reproduce without permission.