

Python ParseBatcher.batch_save Method Code Examples

This article collects typical usage examples of the Python method parse_rest.connection.ParseBatcher.batch_save. If you are wondering how ParseBatcher.batch_save is used in practice, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of its containing class, parse_rest.connection.ParseBatcher.


The following presents 15 code examples of the ParseBatcher.batch_save method, sorted by popularity by default.
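Before turning to the examples, here is a minimal, self-contained sketch of the typical batch_save workflow; the credentials, class name, and field values are placeholders for illustration, not values taken from any of the projects below.

from parse_rest.connection import register, ParseBatcher
from parse_rest.datatypes import Object

# Register the application first (placeholder credentials).
register("YOUR_APPLICATION_ID", "YOUR_REST_API_KEY")

# Subclass Object once per Parse class you want to write to.
class GameScore(Object):
    pass

# Build the objects locally, then save them all in a single batch request.
scores = [GameScore(score=s, player_name="Jane") for s in range(5)]
batcher = ParseBatcher()
batcher.batch_save(scores)  # Parse accepts at most 50 objects per batch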

Example 1: setup_ghosts

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def setup_ghosts(g):
    """
    Create 1 - 50 Ghost objects by "batch_save"-ing them to Parse using 
    ParsePy's ParseBatcher().

    """

    # Start a function timer.
    function_start_time = time.time()

    # We must subclass Object for the class names we want to use from Parse.
    class Ghost(Object):
        pass

    list_of_Ghost_objects_to_upload = []

    for ghost_number in range(1, g + 1, 1):
        new_Ghost_object = Ghost(
            username = "Ghost Partner",
            ghostNum = ghost_number,
            firstName = "Ghost",
            sex = "G",
            array_eventsRegistered = [1,2,3,4,5,6,7,8,9,10]
        )
        list_of_Ghost_objects_to_upload.append(new_Ghost_object)

    batcher = ParseBatcher()
    batcher.batch_save(list_of_Ghost_objects_to_upload)

    print ("\n{} Ghost objects uploaded to Parse in {} seconds.\n".format(g, time.time() - function_start_time))
Author: AlexChick, Project: daeious-event-dev, Lines: 32, Source: __DEV__setup_Ghosts.py

Example 2: accept_post

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
 def accept_post(self, post):
     snippet_posts = [Post.get(snip['objectId']) for snip in self.snippets]
     for snippet_post in snippet_posts:
         snippet_post.archived = True
         snippet_post.original_story = self
         
     batcher = ParseBatcher()
     batcher.batch_save(snippet_posts)
 
     self.snippets = []
     self.accepted_posts.append(post)
Author: TheAppCookbook, Project: snipppit, Lines: 13, Source: story.py

Example 3: testCanBatchUpdate

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
    def testCanBatchUpdate(self):
        user = self._get_logged_user()
        phone_number = "555-0134"

        original_updatedAt = user.updatedAt

        user.phone = phone_number
        batcher = ParseBatcher()
        batcher.batch_save([user])

        self.assertTrue(User.Query.filter(phone=phone_number).exists(),
                        'Failed to batch update user data. New info not on Parse')
        self.assertNotEqual(user.updatedAt, original_updatedAt,
                            'Failed to batch update user data: updatedAt not changed')
Author: danrobinson, Project: ParsePy, Lines: 16, Source: tests.py

Example 4: batchSaveList

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
 def batchSaveList(self, listOfParseObjects):
     if len(listOfParseObjects)==0:
         return;
         
     print 'batch saving objects'
     self.appendToLog('batch saving %d objects' % len(listOfParseObjects))
     
     #batch save a list of parseobjects. the batch limit is 50!
     batcher = ParseBatcher()
     batchLimit = 50
     while len(listOfParseObjects)> 0:
         #save the first @batchLimit amount of objects
         batcher.batch_save(listOfParseObjects[0:batchLimit])
         
         #clear the list of those saved objects
         listOfParseObjects = listOfParseObjects[batchLimit:]
Author: daniman, Project: dibs, Lines: 18, Source: reuse_parser.py
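Example 4 hard-codes the 50-object batch limit inside the method that uses it. A reusable variant might factor the chunking into a standalone helper; the sketch below is illustrative only and is not taken from the project above.

from parse_rest.connection import ParseBatcher

def batch_save_in_chunks(objects, chunk_size=50):
    """Save any number of Parse objects, one batch request per chunk of at most 50."""
    batcher = ParseBatcher()
    for start in range(0, len(objects), chunk_size):
        batcher.batch_save(objects[start:start + chunk_size])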

Example 5: put_answers_in_Parse

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def put_answers_in_Parse():

    # Query for first 1000 Answers
    queryset = list(Answer.Query.all().limit(1000))
    while True:
        if not queryset:
            print("No Answers to delete from Parse -- none exist.")
            break # skip to batch_save without deleting
        elif len(queryset) == len(_Answer.LI_A):
            print("{} Answers already exist in Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Continue with delete anyway? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print "Delete skipped. Upload skipped."
                return
        else:
            print("There are {} Answers to delete from Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Delete Answers from Parse? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print "Delete skipped. Upload skipped."
                return

        # batch_delete in chunks of no more than 50
        batcher = ParseBatcher()
        lili_chunks = [queryset[i:i+50] for i in range(0, len(queryset), 50)]
        for index, chunk in enumerate(lili_chunks):
            batcher.batch_delete(chunk)
            print "\r{} of {} Answers deleted from Parse".format(50*(index+1), len(queryset)),
            sys.stdout.flush()
        print
        break # go to batch_save

    # batch_save in chunks of no more than 50
    len_lia = len(_Answer.LIA)
    batcher = ParseBatcher()
    lili_chunks = [_Answer.LIA[i:i+50] for i in range(0, len_lia, 50)]
    for index, chunk in enumerate(lili_chunks):
        while True:
            try:
                batcher.batch_save(chunk)
                print "\r{} of {} Answers uploaded to Parse".format(50*(index+1), len_lia),
                sys.stdout.flush()
                break
            except:
                print("Locked. Sleeping for 5 seconds.")
                time.sleep(5)
    print
    pass
Author: AlexChick, Project: PCC, Lines: 49, Source: upload_my_answers_to_Parse.py

Example 6: update_answers_in_Parse

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def update_answers_in_Parse():

    # Get a list of all Answers in Parse.
    ct_a = Answer.Query.all().count()
    queryset = []
    batcher = ParseBatcher()
    print("{} Answers exist in Parse.".format(ct_a))
    if ct_a == 0: # None exist; upload whole list
        pass
    elif ct_a > 0: # There's at least 1 to get
        for i in range(0, ct_a, min(ct_a,1000)): # for each chunk of <= 1000 answers
            queryset += list(Answer.Query.all().skip(i).limit(1000)) # get the chunk, add to queryset
        queryset.sort(key = attrgetter("num"))
        for A, a in zip(queryset, [a for a in _Answer.LIA if queryset[a.num-1].num == a.num]): # for each answer with the same num
            # compare all attributes of the _Answer class.
            # if different, set Parse object's attribute to _Answer object's attribute;
            # if all are same, keep in Parse and delete from LIA
            for key in _Answer.LI_ATTR: # for all attributes of the _Answer class
                if getattr(A, key) != getattr(a, key): # if different
                    print(key, getattr(A,key), getattr(a,key))
                    batcher.batch_delete([A])
                    batcher.batch_save([a])
                    # print("{} updated in Parse".format(a.ID))
                    break
                elif _Answer.LI_ATTR[-1] == key:
                    _Answer.LIA.remove(a)
        print("{} Answers updated in Parse.".format(len(queryset)-len(_Answer.LIA)))
        print("{} Answers must be created in Parse.".format(len(_Answer.LIA)))

    # Now, upload those remaining in _Answer.LIA to Parse
    # (should put batch_upload_with_sleep in a separate function)
    # batch_save in chunks of no more than 50
    len_lia = len(_Answer.LIA)
    batcher = ParseBatcher()
    lili_chunks = [_Answer.LIA[i:i+50] for i in range(0, len_lia, 50)]
    for index, chunk in enumerate(lili_chunks):
        while True:
            try:
                batcher.batch_save(chunk)
                print "\r{} of {} Answers uploaded to Parse".format(50*(index+1), len_lia),
                sys.stdout.flush()
                break
            except:
                print("Locked. Sleeping for 5 seconds.")
                time.sleep(5)
    print
Author: AlexChick, Project: PCC, Lines: 48, Source: upload_my_answers_to_Parse.py

Example 7: load_spell_data

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def load_spell_data(db):
	conn = get_db_connection(db)
	curs = conn.cursor()
	find_central_index(curs, **{"type": "spell"})
	index_lines = curs.fetchall()
	batch = []
	batcher = ParseBatcher()
	count = 0
	for line in index_lines:
		spell = get_parse_spell(line['url'])
		if spell:
			batch.append(make_spell(conn, line, spell))
		else:
			batch.append(make_spell(conn, line))
		if len(batch) >= 50:
			batcher.batch_save(batch)
			batch = []
			count += 50
			print "Saving through %s" % count
	batcher.batch_save(batch)
Author: PathfinderRPG, Project: PSRD-Parser, Lines: 22, Source: parse_load_spells.py

Example 8: put_questions_in_Parse

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def put_questions_in_Parse():

    # Query for Questions
    queryset = list(Question.Query.all().limit(1000))
    while True:
        if not queryset:
            print("No Questions to delete from Parse -- none exist.")
            break
        elif len(queryset) == len(_Question.LI_Q):
            print("{} Questions already exist in Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Continue with delete anyway? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print("Delete skipped. Upload skipped.")
                return
        else:
            print("There are {} Questions to delete from Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Delete Questions from Parse? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print("Delete skipped. Upload skipped.")
                return

        # batch_delete in chunks of no more than 50
        batcher = ParseBatcher()
        lili_chunks = [queryset[i:i+50] for i in range(0, len(queryset), 50)]
        for index, chunk in enumerate(lili_chunks):
            batcher.batch_delete(chunk)
            print("\r{} of {} Questions deleted from Parse".format(50*(index+1), len(queryset)), end = "\r")
            sys.stdout.flush()
        print()
        break

    # batch_save in chunks of no more than 50
    len_li_q = len(_Question.LIQ)
    batcher = ParseBatcher()
    lili_chunks = [_Question.LIQ[i:i+50] for i in range(0, len_li_q, 50)]
    for index, chunk in enumerate(lili_chunks):
        batcher.batch_save(chunk)
        print("\r{} of {} Questions uploaded to Parse".format(50*(index+1), len_li_q), end = "\r")
        sys.stdout.flush()
    print()
    pass
Author: AlexChick, Project: PCC, Lines: 43, Source: upload_my_questions_to_Parse.py

Example 9: testBatch

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
    def testBatch(self):
        """test saving, updating and deleting objects in batches"""
        scores = [GameScore(score=s, player_name="Jane", cheat_mode=False) for s in range(5)]
        batcher = ParseBatcher()
        batcher.batch_save(scores)
        self.assertEqual(GameScore.Query.filter(player_name="Jane").count(), 5, "batch_save didn't create objects")
        self.assertTrue(all(s.objectId is not None for s in scores), "batch_save didn't record object IDs")

        # test updating
        for s in scores:
            s.score += 10
        batcher.batch_save(scores)

        updated_scores = GameScore.Query.filter(player_name="Jane")
        self.assertEqual(
            sorted([s.score for s in updated_scores]), list(range(10, 15)), msg="batch_save didn't update objects"
        )

        # test deletion
        batcher.batch_delete(scores)
        self.assertEqual(GameScore.Query.filter(player_name="Jane").count(), 0, "batch_delete didn't delete objects")
Author: hsenju, Project: Goober, Lines: 23, Source: tests.py

Example 10: filter

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
	time = meetString[1][:13]
	startTime, endTime = time.split('-')
	startTime += 'M'
	endTime += 'M'
	startTime = datetime.time(datetime.strptime(startTime, "%I:%M%p"))
	endTime = datetime.time(datetime.strptime(endTime, "%I:%M%p"))
	building = ''.join([i for i in meetString[2] if not i.isdigit()])
	roomNumber = filter(str.isdigit, meetString[2])

	return [m, t, w, r, f], startTime, endTime, building, roomNumber

with open('doc.json') as data_file:
	data = json.load(data_file)
data_to_upload = []
for course in range(len(data)):
	current = data[course]
	if current['Term'] == '20151' and current['Meets1'] != '' and 'RTBA' not in str(current['Meets1']):
		if current['DivisionCode'] == 'CC' or current['DivisionName'] == 'SCH OF ENGR & APP SCI: UGRAD' or current['DivisionCode'] == 'BC' or current['DivisionCode'] == 'GS':
				newClass = ParseObject()
				newClass.class_code = current['Course']
				newClass.instructor = current['Instructor1Name']
				newClass.name = current['CourseTitle']
				#call function that gets location, start, and end time
				newClass.days, newClass.startTime, newClass.endTime, newClass.building, newClass.roomNumber = parseMeetString(current['Meets1'])
				data_to_upload.append(newClass)

print "x"
batcher = ParseBatcher()
for x in range(0, len(data_to_upload), 50):
	batcher.batch_save(data_to_upload[x: x+50 if (x+50) < len(data_to_upload) else len(data_to_upload)])
Author: pkathail, Project: CU-Agora, Lines: 32, Source: readCourses.py

Example 11: setup_ipads

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
def setup_ipads(i, purchaseDate = time.strftime("%Y.%m.%d")):

    """ Create i objects of the class "IPad" and upload them to Parse with ParsePy.

          *  'purchaseDate' is a string formatted like this: "2015.04.18".

          *   WARNING: Program currently assumes there are no existing IPad objects,
              so it starts at iPadNum = 1. Will be fixed to query the existing IPad object with
              the highest iPadNum and begin from that number + 1.                                     
    """

    # Start a function timer.
    function_start_time = time.time()

    # We must subclass Object for the class names we want to use.
    class IPad(Object):
        pass

    # Print the "function is starting" message.
    # (Later, I'd like to make a decorator that does this.)
    print ("\
        \n\n*********************************************************\
        \n*****                                               *****\
        \n*****   Function \"setup_ipads\" is now running.   *****\
        \n*****                                               *****\
        \n*****                                               *****\
        \n\n{} IPad objects are being created...".format(i))

    # Instantiate the list to upload.
    list_IPad_objects_to_upload = []

    # Get a (fictitious) list of i serial numbers for our new IPad objects.
    list_iPadSerialNumbers = get_s_ipad_serial_numbers(i)

    # Create new iPad objects and put them into a big ol' list.
    for index, serial_number in enumerate(list_iPadSerialNumbers):
        
        new_IPad_object = IPad(
            ipNum = index + 1,
            ipSerialNum = serial_number,
            purchaseDate = purchaseDate
            )
        list_IPad_objects_to_upload.append(new_IPad_object)

    print("Done.")

    # Upload the list of new iPad objects to Parse.
        # The Parse batch request limit is 50, and the Parse request limit is 30/sec = 1800/min.
        # Other functions are being run before and/or after this, so to avoid going over
        #     the 1800/min limit, call time.sleep(i/30 - time_spent_uploading). 
    
    # Create a ParseBatcher object.
    batcher = ParseBatcher()

    print ("\n{} IPad objects are being uploaded...".format(i))

    # Start an "uploading" timer.
    uploading_start_time = time.time()

    # Call batcher.batch_save on slices of the list no larger than 50.
    for k in range(i/50 + 1):
        ### lower = 50*k
        ### upper = 
        try:
            batcher.batch_save(list_IPad_objects_to_upload[
                50*k : 50*(k + 1)
                ])
        except:
            batcher.batch_save(list_IPad_objects_to_upload[
                50*k : i
                ])

    # Calculate time spent uploading and how long to sleep for.
    time_spent_uploading = round(time.time() - uploading_start_time, 3)
    how_long_to_sleep_for = (i/30.0) - time_spent_uploading
    how_long_to_sleep_for_rounded = round(how_long_to_sleep_for, 3)
    print("Done.\n")
    print("{} IPad objects uploaded in {} seconds.\n".format(i, time_spent_uploading))

    # Sleep.
    for k in range(1, 101, 1):
        sys.stdout.write("\r{}{} of {}s sleep complete.".format(k, "%", how_long_to_sleep_for_rounded)) # \r puts the cursor back to the start of the line i/o onto the next line
        sys.stdout.flush()
        time.sleep(how_long_to_sleep_for / 100.0)
    sys.stdout.write("\n") # move the cursor to the next line

    # Print results.
    function_total_time = round(time.time() - function_start_time, 3)

    print_str = "*****   Function \"setup_ipads({})\" ran in {} seconds.   *****".format(i, function_total_time)
    ast_str = "*" * (len(print_str))
    space_str = "*****   {}   *****".format(" "*(len(print_str) - 16))
    und_str = ("_" * (len(print_str))) + "\n" + ("=" * (len(print_str)))

    print ("\n\n{}\n{}\n{}\n{}\n{}\n{}\n\n".format(space_str, space_str, print_str, space_str, ast_str, und_str))
Author: AlexChick, Project: daeious-event-dev, Lines: 97, Source: __DEV__setup_IPads.py
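Examples 5, 6, and 11 all work around the same two constraints: at most 50 objects per batch request and a per-minute request limit on Parse. A compact way to combine chunking with a retry-and-sleep loop might look like the sketch below; the function name, sleep interval, and retry policy are assumptions for illustration, not code from the projects above.

import time
from parse_rest.connection import ParseBatcher

def batch_save_with_retry(objects, chunk_size=50, pause=5, max_retries=5):
    """Upload objects in batches of up to 50, sleeping and retrying when a request fails."""
    batcher = ParseBatcher()
    for start in range(0, len(objects), chunk_size):
        chunk = objects[start:start + chunk_size]
        for attempt in range(max_retries):
            try:
                batcher.batch_save(chunk)
                break
            except Exception:
                # Back off briefly before retrying, e.g. when the request limit is hit.
                time.sleep(pause)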

Example 12: register

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
	except Exception, e:
		country=''
	
	url = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?rankby=prominence&location='+lat+','+lng+'&key='+ keys['API_KEY']+'&radius=500000'
	response = urllib2.urlopen(url)
	obj2 = json.loads(response.read())['results']
	register(keys['APPLICATION_ID'], keys['REST_API_KEY'], master_key=keys['MASTER_KEY'])
	try:
		for landmark in obj2:
			parse(city.replace('+', ' '), landmark, country) 
	except Exception,e:
		print city, "Error:", e
	global videos
	if len(videos)>0:
		batcher = ParseBatcher()
		batcher.batch_save(videos)
		videos=[]

if __name__ == "__main__":
	with open("keys.txt") as myfile:
		for line in myfile:
			name, var = line.partition("=")[::2]
			keys[str(name.strip())] = str(var.strip())
	

	cities = []
	done = ['Goa', 'Kathmandu', 'Oslo', 'St Petersburg', 'Doha', 'Bucharest', 'Budapest', 'Stockholm', 'Al Ain', 'Abu Dhabi', 'Glasgow', 'Birmingham', 'Montreal', 'Chicago', 'Lisbon', 'Dallas', 'Bangkok', 'Los Angeles', 'Taipei', 'Milan', 'Seoul', 'Hong Kong', 'Kuala Lumpur', 'Florida', 'Washington', 'San Francisco', 'Osaka', 'Las Vegas', 'Damascus', 'Madina', 'Mecca', 'Santiago', 'Sao Paulo', 'Brasilia', 'Colombia', 'Interlaken', 'Candy', 'Bangalore', 'Wellington', 'Pune', 'Sharjah', 'Fujairah', 'Copenhagen', 'Amsterdam', 'London', 'Tripoli', 'Buenos Aires', 'Ecuador', 'Caracas', 'El Salvador', 'Nairobi', 'Ontario', 'Riyadh', 'Johannesburg', 'Cape Town', 'Colombo', 'Tibet', 'Bhutan', 'Novosibirsk', 'Saint Petersburg', 'Perth', 'Adelaide', 'Melbourne', 'Sydney', 'Tehran', 'Muscat', 'Brussels', 'Bali', 'Honolulu', 'Edinburgh', 'Wellington', 'Jakarta', 'Zurich', 'Dublin', 'Miami', 'Shanghai', 'Istanbul', 'Cairo', 'Prague', 'Vienna', 'Rio de Janeiro', 'Berlin', 'Tokyo', 'Mexico City', 'Munich', 'Boston', 'Baghdad', 'Warsaw', 'Johannesburg', 'Moscow', 'Mumbai', 'Delhi', 'Kolkata', 'Chennai', 'Lahore', 'Karachi', 'Dammam', 'Barcelona', 'Rome', 'Egypt', 'Cape Town', 'Krakow', 'Brazil', 'Florence', 'Peru', 'Paris', 'Canberra', 'Hamburg', 'Venice', 'Sydney', 'Rome', 'Maldives', 'Singapore']
	cities = list(set(cities)-set(done))
	for city in cities:
		landmarks(city, '')
Author: jiahaoliuliu, Project: WorldSpotlight, Lines: 32, Source: script.py

Example 13: pointBase

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
                    firstT = [(50, y_os), ['1', (70, 40, 0)]]
                else:
                    firstT = one[count-1]
                v1 = (one[count][0][0] - firstT[0][0])*2 #- one[count-1][0][0]
                v2 = (one[count][0][1] - firstT[0][1])*2 #- one[count-1][0][1]
                if one[count][1][0] == '0':
                    t = 0
                else:
                    t = math.sqrt((v1)**2 + (v2)**2)/(float(one[count][1][0])*50)
                
                pointt = pointBase(id_line=count+1, id_rocket="Rocket"+str(rocket_num), pos_x=v1, pos_y=v2, speed=t, id_devices=device_ID)
                pointts.append(pointt)
                
                save_limit += 1
                if save_limit >= 49:
                    batcher.batch_save(pointts)
                    pointts = []
                    save_limit = 0
                
                all_object += 1
                
                
            if pointts:
                batcher.batch_save(pointts)

        exported = "sucesfully exported !!!"
        exported_time = time.time() + 3
        label = myfont.render("", 1, (200,0,0))
        screen.blit(label, (450, 580))
     
    if exported_time > time.time():           
Author: lojsk, Project: the_rocket, Lines: 33, Source: enemy_builder.py

Example 14: register

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
from parse_rest.connection import register
from parse_rest.datatypes import Object
import re

register("8fbBNwG2gvwFskbc3SjlO34qmidJkF3pCVPTuVc0", "qNgE46H7emOYu3wsuRLGpMSZVeNxCUfCP81hFSxz", master_key="HhJryin0t8OMP2mOBC3UkJKqyIDFxXMfVGFLtxCq")
class Media(Object):
	pass

medias = []
media = Media.Query.all().limit(1000)
for m in media:
	if hasattr(m, "desc"):
		m.desc = re.sub("<[br|img].*>", "", m.desc)
		print m.desc
		medias.append(m)

def chunks(l, n):
    """ Yield successive n-sized chunks from l.
    """
    for i in xrange(0, len(l), n):
        yield l[i:i+n]

from parse_rest.connection import ParseBatcher
batcher = ParseBatcher()
for chunk in chunks(medias, 50):
	batcher.batch_save(chunk)
Author: maxmeyers, Project: uhhyeahdude, Lines: 28, Source: fixDesc.py

Example 15: createSource

# Required import: from parse_rest.connection import ParseBatcher [as alias]
# Or: from parse_rest.connection.ParseBatcher import batch_save [as alias]
for entry in d.entries:
    # Is this a real news item with a source? The RSS feed also returns images and other entries
    if 'source' in entry:
        source = createSource(**entry.source)

        articles.append(createArticle(
            title=entry.title,
            description=entry.description,
            source=source,
            date=entry.published
        ))

printExplain("To save several objects, use the batcher")

batcher = ParseBatcher()
batcher.batch_save(articles)

print "Our news sources:"

for source in sources.values():
    printTab(source.title)

print "The news from ", sources.values()[0].title

for new in Article.Query.filter(source=sources.values()[0]):
    printSubTitle(new.title)
    print new.description

printTitle("Conclusion")
print """
Parse.com provides an easy way to store and query data, with no admin skills required.
Author: jcmoriar, Project: WeatherGame, Lines: 33, Source: pyToParse.py


Note: The parse_rest.connection.ParseBatcher.batch_save examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors, and redistribution or use should follow the corresponding project's license. Please do not republish without permission.